Changeset 46598 in webkit


Ignore:
Timestamp:
Jul 30, 2009, 1:57:44 PM (16 years ago)
Author:
[email protected]
Message:

Merged nitro-extreme branch into trunk.

Location:
trunk
Files:
3 added
63 edited

Legend:

Unmodified
Added
Removed
  • trunk/JavaScriptCore/API/APICast.h

    r43165 r46598  
    2727#define APICast_h
    2828
    29 #include "JSNumberCell.h"
     29#include "JSAPIValueWrapper.h"
    3030#include "JSValue.h"
    3131#include <wtf/Platform.h>
     
    5959}
    6060
    61 inline JSC::JSValue toJS(JSC::ExecState* exec, JSValueRef v)
     61inline JSC::JSValue toJS(JSC::ExecState*, JSValueRef v)
    6262{
    63     JSC::JSValue jsValue = JSC::JSValue::decode(reinterpret_cast<JSC::EncodedJSValue>(const_cast<OpaqueJSValue*>(v)));
    64 #if USE(ALTERNATE_JSIMMEDIATE)
    65     UNUSED_PARAM(exec);
     63#if USE(JSVALUE32_64)
     64    JSC::JSCell* jsCell = reinterpret_cast<JSC::JSCell*>(const_cast<OpaqueJSValue*>(v));
     65    if (!jsCell)
     66        return JSC::JSValue();
     67    if (jsCell->isAPIValueWrapper())
     68        return static_cast<JSC::JSAPIValueWrapper*>(jsCell)->value();
     69    return jsCell;
    6670#else
    67     if (jsValue && jsValue.isNumber()) {
    68         ASSERT(jsValue.isAPIMangledNumber());
    69         return JSC::jsNumber(exec, jsValue.uncheckedGetNumber());
    70     }
     71    return JSC::JSValue::decode(reinterpret_cast<JSC::EncodedJSValue>(const_cast<OpaqueJSValue*>(v)));
    7172#endif
    72     return jsValue;
    7373}
    7474
     
    9090inline JSValueRef toRef(JSC::ExecState* exec, JSC::JSValue v)
    9191{
    92 #if USE(ALTERNATE_JSIMMEDIATE)
     92#if USE(JSVALUE32_64)
     93    if (!v)
     94        return 0;
     95    if (!v.isCell())
     96        return reinterpret_cast<JSValueRef>(asCell(JSC::jsAPIValueWrapper(exec, v)));
     97    return reinterpret_cast<JSValueRef>(asCell(v));
     98#else
    9399    UNUSED_PARAM(exec);
    94 #else
    95     if (v && v.isNumber()) {
    96         ASSERT(!v.isAPIMangledNumber());
    97         return reinterpret_cast<JSValueRef>(JSC::JSValue::encode(JSC::jsAPIMangledNumber(exec, v.uncheckedGetNumber())));
    98     }
     100    return reinterpret_cast<JSValueRef>(JSC::JSValue::encode(v));
    99101#endif
    100     return reinterpret_cast<JSValueRef>(JSC::JSValue::encode(v));
    101102}
    102103
  • trunk/JavaScriptCore/API/JSCallbackObjectFunctions.h

    r43160 r46598  
    319319    for (JSClassRef jsClass = classRef(); jsClass; jsClass = jsClass->parentClass) {
    320320        if (JSObjectHasInstanceCallback hasInstance = jsClass->hasInstance) {
     321            JSValueRef valueRef = toRef(exec, value);
    321322            JSValueRef exception = 0;
    322323            bool result;
    323324            {
    324325                JSLock::DropAllLocks dropAllLocks(exec);
    325                 result = hasInstance(execRef, thisRef, toRef(exec, value), &exception);
     326                result = hasInstance(execRef, thisRef, valueRef, &exception);
    326327            }
    327328            exec->setException(toJS(exec, exception));
     
    429430                value = convertToType(ctx, thisRef, kJSTypeNumber, &exception);
    430431            }
    431             exec->setException(toJS(exec, exception));
    432             if (value) {
    433                 double dValue;
    434                 return toJS(exec, value).getNumber(dValue) ? dValue : NaN;
    435             }
     432            if (exception) {
     433                exec->setException(toJS(exec, exception));
     434                return 0;
     435            }
     436
     437            double dValue;
     438            return toJS(exec, value).getNumber(dValue) ? dValue : NaN;
    436439        }
    437440           
     
    453456                value = convertToType(ctx, thisRef, kJSTypeString, &exception);
    454457            }
    455             exec->setException(toJS(exec, exception));
    456             if (value)
    457                 return toJS(exec, value).getString();
    458             if (exception)
     458            if (exception) {
     459                exec->setException(toJS(exec, exception));
    459460                return "";
     461            }
     462            return toJS(exec, value).getString();
    460463        }
    461464           
  • trunk/JavaScriptCore/API/tests/testapi.c

    r43692 r46598  
    384384        return NULL;
    385385    JSValueRef value = JSObjectCallAsFunction(context, function, object, 0, NULL, exception);
    386     if (!value)
    387         return (JSValueRef)JSStringCreateWithUTF8CString("convertToType failed");
     386    if (!value) {
     387        JSStringRef errorString = JSStringCreateWithUTF8CString("convertToType failed");
     388        JSValueRef errorStringRef = JSValueMakeString(context, errorString);
     389        JSStringRelease(errorString);
     390        return errorStringRef;
     391    }
    388392    return value;
    389393}
  • trunk/JavaScriptCore/AllInOneFile.cpp

    r44508 r46598  
    3535#include "runtime/JSFunction.cpp"
    3636#include "runtime/Arguments.cpp"
     37#include "runtime/JSAPIValueWrapper.cpp"
    3738#include "runtime/JSGlobalObjectFunctions.cpp"
    3839#include "runtime/PrototypeFunction.cpp"
  • trunk/JavaScriptCore/ChangeLog

    r46528 r46598  
     1=== End merge of nitro-extreme branch 2009-07-30 ===
     2
     32009-07-20  Geoffrey Garen  <[email protected]>
     4
     5        Fixed a post-review typo in r46066 that caused tons of test failures.
     6       
     7        SunSpider reports no change.
     8
     9        * runtime/JSArray.cpp:
     10        (JSC::JSArray::JSArray): Initialize the full vector capacity, to avoid
     11        uninitialized members at the end.
     12
     132009-07-20  Geoffrey Garen  <[email protected]>
     14
     15        Windows WebKit build fix: Added some missing exports.
     16
     17        * JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.def:
     18        * JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore_debug.def:
     19
     202009-07-17  Geoffrey Garen  <[email protected]>
     21
     22        Reviewed by Sam Weinig.
     23
     24        Get the branch working on windows.
     25        https://bugs.webkit.org/show_bug.cgi?id=27391
     26       
     27        SunSpider says 0.3% faster.
     28
     29        * JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.def:
     30        * JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore_debug.def: Updated
     31        MSVC export lists to fix linker errors.
     32
     33        * JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.vcproj: Added / removed
     34        new / old project files.
     35
     36        * jit/JIT.cpp:
     37        (JSC::JIT::privateCompileCTIMachineTrampolines): Used #pragma pack to tell
     38        MSVC that these structures represent actual memory layout, and should not be
     39        automatically aligned. Changed the return value load to load a 64bit quantity
     40        into the canonical registers.
     41
     42        * jit/JIT.h: Moved OBJECT_OFFSETOF definition to StdLibExtras.h because
     43        it's needed by more than just the JIT, and it supplements a standard library
     44        macro (offsetof).
     45
     46        * jit/JITCall.cpp:
     47        (JSC::JIT::compileOpCallInitializeCallFrame): Fixed an incorrectly signed
     48        cast to resolve an MSVC warning.
     49
     50        * jit/JITStubs.h: Used #pragma pack to tell MSVC that these structures
     51        represent actual memory layout, and should not be automatically aligned.
     52
     53        * runtime/JSArray.cpp:
     54        (JSC::JSArray::JSArray): Replaced memset_pattern8 with a for loop, since
     55        memset_pattern8 is not portable. (I verified that this version of the loop
     56        gives the best performance / generated code in GCC.)
     57
     58        * runtime/JSObject.h:
     59        (JSC::JSObject::JSObject): Removed accidental usage of FIELD_OFFSET --
     60        OBJECT_OFFSETOF is our new macro name. (FIELD_OFFSET conflicts with a
     61        definition in winnt.h.)
     62
     63        * runtime/JSValue.cpp: Added some headers needed by non-all-in-one builds.
     64       
     65        * runtime/JSValue.h:
     66        (JSC::JSValue::): Made the tag signed, to match MSVC's signed enum values.
     67        (GCC doesn't seem to care one way or the other.)
     68
     69        * wtf/MainThread.cpp: Moved the StdLibExtras.h #include -- I did this a
     70        while ago to resolve a conflict with winnt.h. I can't remember if it's truly
     71        still needed, but what the heck.
     72
     73        * wtf/StdLibExtras.h: Moved OBJECT_OFFSETOF definition here.
     74
     752009-07-06  Geoffrey Garen  <[email protected]>
     76
     77        Reviewed by Sam Weinig (?).
     78       
     79        Fixed an assertion seen during the stress test.
     80       
     81        Don't assume that, if op1 is constant, op2 is not, and vice versa. Sadly,
     82        not all constants get folded.
     83
     84        * jit/JITArithmetic.cpp:
     85        (JSC::JIT::emit_op_jnless):
     86        (JSC::JIT::emitSlow_op_jnless):
     87        (JSC::JIT::emit_op_jnlesseq):
     88        (JSC::JIT::emitSlow_op_jnlesseq):
     89
     902009-07-06  Geoffrey Garen  <[email protected]>
     91
     92        Reviewed by Sam Weinig.
     93       
     94        Include op_convert_this in result caching.
     95       
     96        No change on SunSpider or v8.
     97
     98        * jit/JITOpcodes.cpp:
     99        (JSC::JIT::emit_op_convert_this):
     100
     101        * jit/JITStubs.cpp:
     102        (JSC::DEFINE_STUB_FUNCTION):
     103        * jit/JITStubs.h:
     104        (JSC::): Made the op_convert_this JIT stub return an EncodedJSValue, so
     105        to maintain the result caching contract that { tag, payload } can be
     106        found in { regT1, regT0 }.
     107
     1082009-07-06  Geoffrey Garen  <[email protected]>
     109
     110        Reviewed by Sam Weinig.
     111       
     112        Implemented result chaining.
     113       
     114        1% faster on SunSpider. 4%-5% faster on v8.
     115
     116        * assembler/MacroAssemblerX86Common.h:
     117        (JSC::MacroAssemblerX86Common::move):
     118        * assembler/X86Assembler.h:
     119        (JSC::X86Assembler::movl_rr): Added an optimization to eliminate
     120        no-op mov instructions, to simplify chaining.
     121
     122        * jit/JIT.cpp:
     123        (JSC::JIT::JIT):
     124        * jit/JIT.h: Added data members and helper functions for recording
     125        chained results. We record both a mapping from virtual to machine register
     126        and the opcode for which the mapping is valid, to help ensure that the
     127        mapping isn't used after the mapped register has been stomped by other
     128        instructions.
     129
     130        * jit/JITCall.cpp:
     131        (JSC::JIT::compileOpCallVarargs):
     132        (JSC::JIT::compileOpCallVarargsSlowCase):
     133        (JSC::JIT::emit_op_ret):
     134        (JSC::JIT::emit_op_construct_verify):
     135        (JSC::JIT::compileOpCall):
     136        (JSC::JIT::compileOpCallSlowCase): Chain function call results.
     137
     138        * jit/JITInlineMethods.h:
     139        (JSC::JIT::emitLoadTag):
     140        (JSC::JIT::emitLoadPayload):
     141        (JSC::JIT::emitLoad):
     142        (JSC::JIT::emitLoad2):
     143        (JSC::JIT::isLabeled):
     144        (JSC::JIT::map):
     145        (JSC::JIT::unmap):
     146        (JSC::JIT::isMapped):
     147        (JSC::JIT::getMappedPayload):
     148        (JSC::JIT::getMappedTag): Use helper functions when loading virtual
     149        registers into machine registers, in case the loads can be eliminated
     150        by chaining.
     151
     152        * jit/JITOpcodes.cpp:
     153        (JSC::JIT::emit_op_mov):
     154        (JSC::JIT::emit_op_end):
     155        (JSC::JIT::emit_op_instanceof):
     156        (JSC::JIT::emit_op_get_global_var):
     157        (JSC::JIT::emit_op_put_global_var):
     158        (JSC::JIT::emit_op_get_scoped_var):
     159        (JSC::JIT::emit_op_put_scoped_var):
     160        (JSC::JIT::emit_op_to_primitive):
     161        (JSC::JIT::emit_op_resolve_global):
     162        (JSC::JIT::emit_op_jneq_ptr):
     163        (JSC::JIT::emit_op_next_pname):
     164        (JSC::JIT::emit_op_to_jsnumber):
     165        (JSC::JIT::emit_op_catch): Chain results from these opcodes.
     166
     167        (JSC::JIT::emit_op_profile_will_call):
     168        (JSC::JIT::emit_op_profile_did_call): Load the profiler into regT2 to
     169        avoid stomping a chained result.
     170
     171        * jit/JITPropertyAccess.cpp:
     172        (JSC::JIT::emit_op_method_check):
     173        (JSC::JIT::emit_op_get_by_val):
     174        (JSC::JIT::emit_op_get_by_id): Chain results from these opcodes.
     175
     176        * jit/JITStubCall.h:
     177        (JSC::JITStubCall::addArgument): Always use { regT1, regT0 }, to facilitate
     178        chaining.
     179
     180        (JSC::JITStubCall::call): Unmap all mapped registers, since our callee
     181        stub might stomp them.
     182
     1832009-07-01  Sam Weinig  <[email protected]>
     184
     185        Reviewed by Gavin Barraclough.
     186
     187        Don't reload values in emitBinaryDoubleOp.
     188
     189        SunSpider reports a 0.6% progression.
     190
     191        * jit/JIT.h:
     192        * jit/JITArithmetic.cpp:
     193        (JSC::JIT::emit_op_jnless):
     194        (JSC::JIT::emit_op_jnlesseq):
     195        (JSC::JIT::emitBinaryDoubleOp):
     196
     1972009-07-01  Sam Weinig  <[email protected]>
     198
     199        Reviewed by Geoffrey Garen.
     200
     201        Convert op_div to load op1 and op2 up front.
     202
     203        * jit/JITArithmetic.cpp:
     204        (JSC::JIT::emit_op_div):
     205
     2062009-07-01  Sam Weinig  <[email protected]>
     207
     208        Reviewed by Geoffrey Garen.
     209
     210        Don't emit code in emitBinaryDoubleOp if code is unreachable, observable
      211        via an empty (unlinked) jumplist passed in.  This only affects op_jnless
     212        and op_jnlesseq at present.
     213
     214        * jit/JITArithmetic.cpp:
     215        (JSC::JIT::emitSlow_op_jnless):
     216        (JSC::JIT::emitSlow_op_jnlesseq):
     217        (JSC::JIT::emitBinaryDoubleOp):
     218
     2192009-07-01  Geoffrey Garen  <[email protected]>
     220
     221        Reviewed by Sam Weinig.
     222
     223        Converted op_mod to put { tag, payload } in { regT1, regT0 }, and
     224        tidied up its constant case.
     225       
     226        SunSpider reports a 0.2% regression, but a micro-benchmark of op_mod
     227        shows a 12% speedup, and the SunSpider test that uses op_mod most should
     228        benefit a lot from result caching in the end, since it almost always
     229        performs (expression) % constant.
     230
     231        * jit/JITArithmetic.cpp:
     232        (JSC::JIT::emit_op_mod):
     233        (JSC::JIT::emitSlow_op_mod):
     234
     2352009-06-30  Sam Weinig  <[email protected]>
     236
     237        Reviewed by Geoffrey Garen.
     238
     239        Converted some more arithmetic ops to put { tag, payload } in
     240        { regT1, regT0 }.
     241
     242        * jit/JITArithmetic.cpp:
     243        (JSC::JIT::emit_op_mul):
     244        (JSC::JIT::emitSlow_op_mul):
     245
     2462009-06-30  Geoffrey Garen  <[email protected]>
     247
     248        Reviewed by Sam Weinig.
     249
     250        Converted some more arithmetic ops to put { tag, payload } in
     251        { regT1, regT0 }, and added a case for subtract constant.
     252       
     253        SunSpider says no change. v8 says 0.3% slower.
     254
     255        * jit/JIT.h:
     256        * jit/JITArithmetic.cpp:
     257        (JSC::JIT::emit_op_add):
     258        (JSC::JIT::emitAdd32Constant):
     259        (JSC::JIT::emitSlow_op_add):
     260        (JSC::JIT::emit_op_sub):
     261        (JSC::JIT::emitSub32Constant):
     262        (JSC::JIT::emitSlow_op_sub):
     263
     2642009-06-30  Gavin Barraclough  <[email protected]>
     265
     266        Reviewed by Sam Weinig.
     267
     268        Remove more uses of addressFor(), load double constants directly from
      269        the constant pool in the CodeBlock, rather than from the register file.
     270
     271        * jit/JITArithmetic.cpp:
     272        (JSC::JIT::emitAdd32Constant):
     273        (JSC::JIT::emitBinaryDoubleOp):
     274
     2752009-06-30  Geoffrey Garen  <[email protected]>
     276
     277        Reviewed by Sam Weinig.
     278       
     279        Fixed a bug in postfix ops, where we would treat x = x++ and x = x--
     280        as a no-op, even if x were not an int, and the ++/-- could have side-effects.
     281
     282        * jit/JITArithmetic.cpp:
     283        (JSC::JIT::emit_op_post_inc):
     284        (JSC::JIT::emitSlow_op_post_inc):
     285        (JSC::JIT::emit_op_post_dec):
     286        (JSC::JIT::emitSlow_op_post_dec):
     287
     2882009-06-30  Geoffrey Garen  <[email protected]>
     289
     290        Reviewed by Sam Weinig.
     291       
     292        Converted some arithmetic ops to put { tag, payload } in
     293        { regT1, regT0 }.
     294       
     295        SunSpider says 0.7% faster. v8 says no change.
     296
     297        * jit/JIT.h:
     298        * jit/JITArithmetic.cpp:
     299        (JSC::JIT::emit_op_jnless):
     300        (JSC::JIT::emit_op_jnlesseq):
     301        (JSC::JIT::emit_op_lshift):
     302        (JSC::JIT::emit_op_rshift):
     303        (JSC::JIT::emit_op_bitand):
     304        (JSC::JIT::emit_op_bitor):
     305        (JSC::JIT::emit_op_bitxor):
     306        * jit/JITInlineMethods.h:
     307        (JSC::JIT::isOperandConstantImmediateInt):
     308        (JSC::JIT::getOperandConstantImmediateInt):
     309
     3102009-06-30  Gavin Barraclough  <[email protected]>
     311
     312        Reviewed by Sam Weinig.
     313
     314        Start removing cases of addressFor().
     315
     316        * jit/JIT.h:
     317        * jit/JITArithmetic.cpp:
     318        (JSC::JIT::emitAdd32Constant):
     319        (JSC::JIT::emitBinaryDoubleOp):
     320        (JSC::JIT::emit_op_div):
     321        * jit/JITInlineMethods.h:
     322        (JSC::JIT::emitLoadDouble):
     323        (JSC::JIT::emitLoadInt32ToDouble):
     324        (JSC::JIT::emitStoreDouble):
     325        * jit/JITOpcodes.cpp:
     326        (JSC::JIT::emit_op_jfalse):
     327        (JSC::JIT::emit_op_jtrue):
     328
     3292009-06-30  Geoffrey Garen  <[email protected]>
     330
     331        Rolled back in my last patch with regression fixed.
     332
     333        * jit/JIT.cpp:
     334        (JSC::JIT::privateCompileSlowCases):
     335        * jit/JIT.h:
     336        * jit/JITOpcodes.cpp:
     337        (JSC::JIT::emit_op_loop_if_less):
     338        (JSC::JIT::emit_op_loop_if_lesseq):
     339        (JSC::JIT::emit_op_resolve_global):
     340        (JSC::JIT::emitSlow_op_resolve_global):
     341        (JSC::JIT::emit_op_eq):
     342        (JSC::JIT::emitSlow_op_eq):
     343        (JSC::JIT::emit_op_neq):
     344        (JSC::JIT::emitSlow_op_neq):
     345
     3462009-06-30  Geoffrey Garen  <[email protected]>
     347
     348        Rolled out my last patch because it was a 2% SunSpider regression.
     349
     350        * jit/JIT.cpp:
     351        (JSC::JIT::privateCompileSlowCases):
     352        * jit/JIT.h:
     353        * jit/JITOpcodes.cpp:
     354        (JSC::JIT::emit_op_loop_if_less):
     355        (JSC::JIT::emit_op_loop_if_lesseq):
     356        (JSC::JIT::emit_op_resolve_global):
     357        (JSC::JIT::emit_op_eq):
     358        (JSC::JIT::emitSlow_op_eq):
     359        (JSC::JIT::emit_op_neq):
     360        (JSC::JIT::emitSlow_op_neq):
     361
     3622009-06-30  Geoffrey Garen  <[email protected]>
     363
     364        Reviewed by Gavin "Sam Weinig" Barraclough.
     365       
     366        Standardized the rest of our opcodes to put { tag, payload } in
     367        { regT1, regT0 } where possible.
     368
     369        * jit/JIT.cpp:
     370        (JSC::JIT::privateCompileSlowCases):
     371        * jit/JIT.h:
     372        * jit/JITOpcodes.cpp:
     373        (JSC::JIT::emit_op_loop_if_less):
     374        (JSC::JIT::emit_op_loop_if_lesseq):
     375        (JSC::JIT::emit_op_resolve_global):
     376        (JSC::JIT::emitSlow_op_resolve_global):
     377        (JSC::JIT::emit_op_eq):
     378        (JSC::JIT::emitSlow_op_eq):
     379        (JSC::JIT::emit_op_neq):
     380        (JSC::JIT::emitSlow_op_neq):
     381
     3822009-06-30  Gavin Barraclough  <[email protected]>
     383
     384        Reviewed by Geoffrey Garen.
     385
     386        Replace calls to store32(tagFor()) and store32(payloadFor())
     387        with emitStoreInt32(), emitStoreBool(), and emitStoreCell().
     388
     389        * jit/JIT.h:
     390        * jit/JITArithmetic.cpp:
     391        (JSC::JIT::emit_op_negate):
     392        (JSC::JIT::emit_op_lshift):
     393        (JSC::JIT::emit_op_rshift):
     394        (JSC::JIT::emit_op_bitand):
     395        (JSC::JIT::emitBitAnd32Constant):
     396        (JSC::JIT::emit_op_bitor):
     397        (JSC::JIT::emitBitOr32Constant):
     398        (JSC::JIT::emit_op_bitxor):
     399        (JSC::JIT::emitBitXor32Constant):
     400        (JSC::JIT::emit_op_bitnot):
     401        (JSC::JIT::emit_op_post_inc):
     402        (JSC::JIT::emit_op_post_dec):
     403        (JSC::JIT::emit_op_pre_inc):
     404        (JSC::JIT::emit_op_pre_dec):
     405        (JSC::JIT::emit_op_add):
     406        (JSC::JIT::emitAdd32Constant):
     407        (JSC::JIT::emit_op_sub):
     408        (JSC::JIT::emitSub32ConstantLeft):
     409        (JSC::JIT::emitSub32ConstantRight):
     410        (JSC::JIT::emit_op_mul):
     411        (JSC::JIT::emitSlow_op_mul):
     412        (JSC::JIT::emit_op_div):
     413        (JSC::JIT::emit_op_mod):
     414        * jit/JITCall.cpp:
     415        (JSC::JIT::emit_op_load_varargs):
     416        * jit/JITInlineMethods.h:
     417        (JSC::JIT::emitStoreInt32):
     418        (JSC::JIT::emitStoreCell):
     419        (JSC::JIT::emitStoreBool):
     420        (JSC::JIT::emitStore):
     421        * jit/JITOpcodes.cpp:
     422        (JSC::JIT::emit_op_instanceof):
     423        (JSC::JIT::emit_op_not):
     424        (JSC::JIT::emit_op_eq):
     425        (JSC::JIT::emitSlow_op_eq):
     426        (JSC::JIT::emit_op_neq):
     427        (JSC::JIT::emitSlow_op_neq):
     428        (JSC::JIT::compileOpStrictEq):
     429        (JSC::JIT::emit_op_eq_null):
     430        (JSC::JIT::emit_op_neq_null):
     431        * jit/JITStubCall.h:
     432        (JSC::JITStubCall::call):
     433
     4342009-06-30  Geoffrey Garen  <[email protected]>
     435
     436        Reviewed by Sam Weinig.
     437       
     438        Standardized the rest of the property access instructions to put { tag,
     439        payload } in { regT1, regT0 }.
     440
     441        Small v8 speedup, 0.2% SunSpider slowdown.
     442
     443        * jit/JIT.h:
     444        * jit/JITInlineMethods.h:
     445        (JSC::JIT::emitLoad):
     446        (JSC::JIT::emitLoad2):
     447        * jit/JITPropertyAccess.cpp:
     448        (JSC::JIT::emit_op_get_by_val):
     449        (JSC::JIT::emitSlow_op_get_by_val):
     450        (JSC::JIT::emit_op_put_by_val):
     451        (JSC::JIT::emitSlow_op_put_by_val):
     452        (JSC::JIT::emit_op_put_by_id):
     453        (JSC::JIT::emitSlow_op_put_by_id):
     454        (JSC::JIT::patchPutByIdReplace):
     455
     4562009-06-29  Sam Weinig  <[email protected]>
     457
     458        Reviewed by Gavin Barraclough.
     459
     460        Various cleanups.
     461        - Use fpRegT* instead of X86::xmm*.
     462        - Use a switch statement in emitBinaryDoubleOp instead of a bunch of
     463          if/elses.
     464
     465        * jit/JITArithmetic.cpp:
     466        (JSC::JIT::emitAdd32Constant):
     467        (JSC::JIT::emitBinaryDoubleOp):
     468        (JSC::JIT::emit_op_div):
     469
     4702009-06-29  Sam Weinig  <[email protected]>
     471
     472        Reviewed by Geoffrey Garen.
     473
     474        Add inline code dealing with doubles for op_jfalse and op_jtrue.
     475
     476        * assembler/MacroAssemblerX86Common.h:
     477        (JSC::MacroAssemblerX86Common::):
     478        (JSC::MacroAssemblerX86Common::zeroDouble):
     479        * jit/JITOpcodes.cpp:
     480        (JSC::JIT::emit_op_jfalse):
     481        (JSC::JIT::emit_op_jtrue):
     482
     4832009-06-28  Geoffrey Garen  <[email protected]>
     484
     485        Reviewed by Sam Weinig.
     486
     487        Standardized op_get_by_id to put { tag, payload } in { regT1, regT0 }.
     488       
     489        SunSpider and v8 report maybe 0.2%-0.4% regressions, but the optimization
     490        this enables will win much more than that back.
     491
     492        * jit/JIT.cpp:
     493        (JSC::JIT::privateCompileCTIMachineTrampolines):
     494        * jit/JIT.h:
     495        * jit/JITPropertyAccess.cpp:
     496        (JSC::JIT::emit_op_method_check):
     497        (JSC::JIT::emit_op_get_by_id):
     498        (JSC::JIT::compileGetByIdHotPath):
     499        (JSC::JIT::compileGetByIdSlowCase):
     500        (JSC::JIT::patchGetByIdSelf):
     501        (JSC::JIT::privateCompilePatchGetArrayLength):
     502        (JSC::JIT::privateCompileGetByIdProto):
     503        (JSC::JIT::privateCompileGetByIdSelfList):
     504        (JSC::JIT::privateCompileGetByIdProtoList):
     505        (JSC::JIT::privateCompileGetByIdChainList):
     506        (JSC::JIT::privateCompileGetByIdChain):
     507
     5082009-06-26  Geoffrey Garen  <[email protected]>
     509
     510        Reviewed by Maciej Stachowiak.
     511       
     512        Standardized op_call to put { tag, payload } in { regT1, regT0 }.
     513       
     514        SunSpider and v8 report no change.
     515
     516        * jit/JIT.cpp:
     517        (JSC::JIT::privateCompileCTIMachineTrampolines):
     518        * jit/JITCall.cpp:
     519        (JSC::JIT::compileOpCallInitializeCallFrame):
     520        (JSC::JIT::compileOpCallSetupArgs):
     521        (JSC::JIT::compileOpConstructSetupArgs):
     522        (JSC::JIT::compileOpCallVarargsSetupArgs):
     523        (JSC::JIT::compileOpCallVarargs):
     524        (JSC::JIT::compileOpCall):
     525        (JSC::JIT::compileOpCallSlowCase):
     526
     5272009-06-26  Sam Weinig  <[email protected]>
     528
     529        Reviewed by Geoffrey Garen.
     530
     531        Handle multiplying by zero a little better by
     532        inlining the case that both operands are non-negative
     533        into the slowpath.
     534
     535        * assembler/MacroAssemblerX86Common.h:
     536        (JSC::MacroAssemblerX86Common::branchOr32):
     537        * jit/JITArithmetic.cpp:
     538        (JSC::JIT::emit_op_mul):
     539        (JSC::JIT::emitSlow_op_mul):
     540
     5412009-06-25  Geoffrey Garen  <[email protected]>
     542
     543        Reviewed by Sam Weinig.
     544       
     545        Optimize x++ to ++x inside for loops.
     546       
     547        Sadly, no measurable speedup, but this should help with result chaining.
     548
     549        * parser/Nodes.cpp:
     550        (JSC::ForNode::emitBytecode):
     551
     5522009-06-25  Geoffrey Garen  <[email protected]>
     553
     554        Reviewed by Sam Weinig.
     555       
     556        Standardized some more opcodes to put { tag, payload } in { regT1, regT0 }.
     557
     558        * jit/JITArithmetic.cpp:
     559        (JSC::JIT::emitSlow_op_bitnot):
     560        (JSC::JIT::emit_op_post_inc):
     561
     5622009-06-25  Geoffrey Garen  <[email protected]>
     563
     564        Reviewed by Sam Weinig.
     565       
     566        Standardized some more opcodes to put { tag, payload } in { regT1, regT0 }.
     567
     568        * jit/JITArithmetic.cpp:
     569        (JSC::JIT::emit_op_bitnot):
     570        (JSC::JIT::emit_op_post_dec):
     571        (JSC::JIT::emit_op_pre_inc):
     572        (JSC::JIT::emitSlow_op_pre_inc):
     573        (JSC::JIT::emit_op_pre_dec):
     574        (JSC::JIT::emitSlow_op_pre_dec):
     575
     5762009-06-25  Geoffrey Garen  <[email protected]>
     577
     578        Reviewed by Sam Weinig.
     579       
     580        Standardized some more opcodes to put { tag, payload } in { regT1, regT0 }.
     581
     582        * jit/JITArithmetic.cpp:
     583        (JSC::JIT::emit_op_negate):
     584        (JSC::JIT::emitSlow_op_negate):
     585        * jit/JITCall.cpp:
     586        (JSC::JIT::emit_op_construct_verify):
     587        (JSC::JIT::emitSlow_op_construct_verify):
     588
     5892009-06-25  Geoffrey Garen  <[email protected]>
     590
     591        Reviewed by Sam Weinig.
     592       
     593        Standardized some more opcodes to put { tag, payload } in { regT1, regT0 }.
     594
     595        * jit/JITOpcodes.cpp:
     596        (JSC::JIT::emit_op_loop_if_true):
     597        (JSC::JIT::emit_op_jfalse):
     598        (JSC::JIT::emit_op_jtrue):
     599        (JSC::JIT::emit_op_jeq_null):
     600        (JSC::JIT::emit_op_jneq_null):
     601        (JSC::JIT::emit_op_eq_null):
     602        (JSC::JIT::emit_op_neq_null):
     603
     6042009-06-25  Geoffrey Garen  <[email protected]>
     605
     606        Reviewed by Sam Weinig (sort of, maybe).
     607       
     608        Fixed some ASSERTs in http/tests/security.
     609       
     610        These ASSERTs were introduced by http://trac.webkit.org/changeset/45057,
     611        but the underlying problem was actually older. http://trac.webkit.org/changeset/45057
     612        just exposed the problem by enabling optimization in more cases.
     613       
     614        The ASSERTs fired because we tested PropertySlot::slotBase() for validity,
     615        but slotBase() ASSERTs if it's invalid, so we would ASSERT before
     616        the test could happen. Solution: Remove the ASSERT. Maybe it was valid
     617        once, but it clearly goes against a pattern we've deployed of late.
     618       
     619        The underlying problem was that WebCore would re-use a PropertySlot in
     620        the case of a forwarding access, and the second use would not completely
     621        overwrite the first use. Solution: Make sure to overwrite m_offset when
     622        setting a value on a PropertySlot. (Other values already get implicitly
     623        overwritten during reuse.)
     624
     625        * runtime/PropertySlot.h:
     626        (JSC::PropertySlot::PropertySlot):
     627        (JSC::PropertySlot::setValueSlot):
     628        (JSC::PropertySlot::setValue):
     629        (JSC::PropertySlot::setRegisterSlot):
     630        (JSC::PropertySlot::setUndefined):
     631        (JSC::PropertySlot::slotBase):
     632        (JSC::PropertySlot::clearOffset):
     633
     6342009-06-24  Gavin Barraclough  <[email protected]>
     635
     636        Reviewed by Geoff Garen.
     637
      638        Enable JIT_OPTIMIZE_METHOD_CALLS on the branch, implementation matches current implementation in ToT.
     639
     640        * jit/JIT.h:
     641        * jit/JITPropertyAccess.cpp:
     642        (JSC::JIT::emit_op_method_check):
     643        (JSC::JIT::emitSlow_op_method_check):
     644        (JSC::JIT::emit_op_get_by_id):
     645        (JSC::JIT::compileGetByIdHotPath):
     646        (JSC::JIT::emitSlow_op_get_by_id):
     647        (JSC::JIT::compileGetByIdSlowCase):
     648
     6492009-06-23  Geoffrey Garen  <[email protected]>
     650
     651        Reviewed by Sam Weinig.
     652
     653        Bit off a tiny bit more of standardizing opcode behavior to help with result
     654        caching.
     655       
     656        SunSpider reports no change, v8 maybe a tiny speedup.
     657
     658        * jit/JITOpcodes.cpp:
     659        (JSC::JIT::emit_op_to_jsnumber):
     660        (JSC::JIT::emitSlow_op_to_jsnumber):
     661        (JSC::JIT::emit_op_convert_this):
     662        (JSC::JIT::emitSlow_op_convert_this):
     663
     6642009-06-23  Geoffrey Garen  <[email protected]>
     665
     666        Reviewed by Sam Weinig.
     667
     668        Bit off a tiny bit more of standardizing opcode behavior to help with result
     669        caching -- including removing my old enemy, op_resolve_function, because
     670        it was non-standard, and removing it felt better than helping it limp along.
     671       
     672        SunSpider reports no change, v8 maybe a tiny speedup.
     673       
     674        * bytecode/CodeBlock.cpp:
     675        (JSC::CodeBlock::dump):
     676        * bytecode/Opcode.h:
     677        * bytecompiler/BytecodeGenerator.cpp:
     678        * bytecompiler/BytecodeGenerator.h:
     679        * interpreter/Interpreter.cpp:
     680        (JSC::Interpreter::privateExecute):
     681        * jit/JIT.cpp:
     682        (JSC::JIT::privateCompileMainPass):
     683        * jit/JIT.h:
     684        * jit/JITOpcodes.cpp:
     685        (JSC::JIT::emit_op_get_scoped_var):
     686        (JSC::JIT::emit_op_put_scoped_var):
     687        (JSC::JIT::emit_op_to_primitive):
     688        (JSC::JIT::emitSlow_op_to_primitive):
     689        * jit/JITStubs.cpp:
     690        * jit/JITStubs.h:
     691        * parser/Nodes.cpp:
     692        (JSC::FunctionCallResolveNode::emitBytecode):
     693
     6942009-06-23  Geoffrey Garen  <[email protected]>
     695
     696        Reviewed by Sam Weinig.
     697       
     698        Bit off a tiny bit of standardizing opcode behavior to help with result
     699        caching.
     700       
     701        0.6% SunSpider speedup. 0.3% v8 speedup.
     702
     703        * jit/JITInlineMethods.h:
      704        (JSC::JIT::emitLoad): Accommodate a base register that overlaps with payload
     705        by loading tag before payload, to avoid stomping base/payload.
     706
     707        * jit/JITOpcodes.cpp:
     708        (JSC::JIT::emit_op_mov): Abide by the standard "tag in regT1, payload in
     709        regT0" semantics.
     710
     711        (JSC::JIT::emit_op_get_global_var):
      712        (JSC::JIT::emit_op_put_global_var): Ditto. Also, removed some irrelevant
     713        loads while I was at it. The global object's "d" pointer never changes
     714        after construction.
     715
     7162009-06-23  Gavin Barraclough  <[email protected]>
     717
     718        Reviewed by Sam Weinig.
     719
     720        Remove 'arguments' field from Register union (again).
     721        This time do so without breaking tests (radical, I know).
     722
     723        * interpreter/CallFrame.h:
     724        (JSC::ExecState::optionalCalleeArguments):
     725        (JSC::ExecState::setArgumentCount):
     726        (JSC::ExecState::init):
     727        * interpreter/Interpreter.cpp:
     728        (JSC::Interpreter::dumpRegisters):
     729        (JSC::Interpreter::unwindCallFrame):
     730        (JSC::Interpreter::privateExecute):
     731        (JSC::Interpreter::retrieveArguments):
     732        * interpreter/Register.h:
     733        (JSC::Register::withInt):
     734        (JSC::Register::):
     735        (JSC::Register::Register):
     736        (JSC::Register::i):
     737        * jit/JITStubs.cpp:
     738        (JSC::JITStubs::cti_op_tear_off_arguments):
     739        * runtime/Arguments.h:
     740        (JSC::JSActivation::copyRegisters):
     741        (JSC::Register::arguments):
     742        * runtime/JSActivation.cpp:
     743        (JSC::JSActivation::argumentsGetter):
     744        * runtime/JSActivation.h:
     745
     7462009-06-23  Geoffrey Garen  <[email protected]>
     747
     748        Reviewed by Sam Weinig.
     749       
     750        Removed some result register tracking cruft in preparation for a new
     751        result tracking mechanism.
     752       
     753        SunSpider reports no change.
     754
     755        * assembler/AbstractMacroAssembler.h:
     756        * assembler/X86Assembler.h:
     757        (JSC::X86Assembler::JmpDst::JmpDst): No need to track jump targets in
     758        machine code; we already do this in bytecode.
     759
     760        * jit/JIT.cpp:
     761        (JSC::JIT::JIT):
     762        (JSC::JIT::emitTimeoutCheck): Make sure to save and restore the result
     763        registers, so an opcode with a timeout check can still benefit from result
     764        register caching.
     765
     766        (JSC::JIT::privateCompileMainPass):
     767        (JSC::JIT::privateCompileSlowCases): Removed calls to killLastResultRegister()
     768        in preparation for something new.
     769
     770        * jit/JIT.h:
     771        * jit/JITArithmetic.cpp:
     772        (JSC::JIT::emit_op_jnless):
     773        (JSC::JIT::emit_op_jnlesseq):
     774        * jit/JITInlineMethods.h:
     775        (JSC::JIT::emitGetFromCallFrameHeaderPtr):
     776        (JSC::JIT::emitGetFromCallFrameHeader32):
     777        * jit/JITOpcodes.cpp:
     778        (JSC::JIT::emit_op_jmp):
     779        (JSC::JIT::emit_op_jfalse):
     780        (JSC::JIT::emit_op_jtrue):
     781        (JSC::JIT::emit_op_jeq_null):
     782        (JSC::JIT::emit_op_jneq_null):
     783        (JSC::JIT::emit_op_jneq_ptr):
     784        (JSC::JIT::emit_op_jsr):
     785        (JSC::JIT::emit_op_sret):
     786        (JSC::JIT::emit_op_jmp_scopes): ditto
     787
     788        * jit/JITStubCall.h:
     789        (JSC::JITStubCall::JITStubCall):
     790        (JSC::JITStubCall::getArgument): added a mechanism for reloading an argument
     791        you passed to a JIT stub, for use in emitTimeoutCheck.
     792
     7932009-06-23  Sam Weinig  <[email protected]>
     794
     795        Reviewed by Geoffrey Garen.
     796
     797        Remove now-useless inplace variants of binary ops.
     798
     799        * jit/JIT.h:
     800        * jit/JITArithmetic.cpp:
     801        (JSC::JIT::emit_op_bitand):
     802        (JSC::JIT::emit_op_bitor):
     803        (JSC::JIT::emit_op_bitxor):
     804        (JSC::JIT::emit_op_add):
     805        (JSC::JIT::emit_op_sub):
     806        (JSC::JIT::emit_op_mul):
     807
     8082009-06-23  Sam Weinig  <[email protected]>
     809
     810        Reviewed by Geoffrey Garen.
     811
     812        Move off memory operands to aid in re-enabling result caching.
     813
     814        - No regression measured.
     815
     816        * jit/JIT.h:
     817        * jit/JITArithmetic.cpp:
     818        (JSC::JIT::emit_op_negate):
     819        (JSC::JIT::emit_op_jnless):
     820        (JSC::JIT::emit_op_jnlesseq):
     821        (JSC::JIT::emit_op_lshift):
     822        (JSC::JIT::emit_op_rshift):
     823        (JSC::JIT::emit_op_bitand):
     824        (JSC::JIT::emitBitAnd32Constant):
     825        (JSC::JIT::emitBitAnd32InPlace):
     826        (JSC::JIT::emit_op_bitor):
     827        (JSC::JIT::emitBitOr32Constant):
     828        (JSC::JIT::emitBitOr32InPlace):
     829        (JSC::JIT::emit_op_bitxor):
     830        (JSC::JIT::emitBitXor32Constant):
     831        (JSC::JIT::emitBitXor32InPlace):
     832        (JSC::JIT::emit_op_bitnot):
     833        (JSC::JIT::emit_op_post_inc):
     834        (JSC::JIT::emit_op_post_dec):
     835        (JSC::JIT::emit_op_pre_inc):
     836        (JSC::JIT::emitSlow_op_pre_inc):
     837        (JSC::JIT::emit_op_pre_dec):
     838        (JSC::JIT::emitSlow_op_pre_dec):
     839        (JSC::JIT::emit_op_add):
     840        (JSC::JIT::emitAdd32Constant):
     841        (JSC::JIT::emitAdd32InPlace):
     842        (JSC::JIT::emitSlow_op_add):
     843        (JSC::JIT::emitSlowAdd32Constant):
     844        (JSC::JIT::emit_op_sub):
     845        (JSC::JIT::emitSlow_op_sub):
     846        (JSC::JIT::emitSub32ConstantLeft):
     847        (JSC::JIT::emitSub32ConstantRight):
     848        (JSC::JIT::emitSub32InPlaceLeft):
     849        (JSC::JIT::emitSub32InPlaceRight):
     850        (JSC::JIT::emitBinaryDoubleOp):
     851        (JSC::JIT::emit_op_mul):
     852        (JSC::JIT::emitMul32InPlace):
     853        (JSC::JIT::emit_op_div):
     854        (JSC::JIT::emit_op_mod):
     855        * jit/JITCall.cpp:
     856        (JSC::JIT::compileOpCallVarargs):
     857        * jit/JITOpcodes.cpp:
     858        (JSC::JIT::emit_op_loop_if_less):
     859        (JSC::JIT::emit_op_loop_if_lesseq):
     860        (JSC::JIT::emit_op_instanceof):
     861        (JSC::JIT::emit_op_to_primitive):
     862        (JSC::JIT::emit_op_not):
     863        (JSC::JIT::emit_op_jneq_ptr):
     864        (JSC::JIT::emit_op_eq):
     865        (JSC::JIT::emit_op_neq):
     866        (JSC::JIT::emit_op_to_jsnumber):
     867        * jit/JITPropertyAccess.cpp:
     868        (JSC::JIT::emit_op_get_by_val):
     869        (JSC::JIT::emit_op_put_by_val):
     870
     8712009-06-23  Geoffrey Garen  <[email protected]>
     872
     873        Reviewed by Sam Weinig.
     874       
     875        Fixed some missing and/or misplaced labels in bytecode generation, so
     876        we don't have to work around them in JIT code generation.
     877
     878        * bytecompiler/BytecodeGenerator.cpp:
     879        (JSC::BytecodeGenerator::emitJumpSubroutine):
     880        * parser/Nodes.cpp:
     881        (JSC::TryNode::emitBytecode):
     882
     8832009-06-22  Geoffrey Garen  <[email protected]>
     884
     885        Reviewed by Sam Weinig.
     886       
     887        For member function calls, emit "this" directly into the "this" slot
     888        for the function call, instead of moving it there later. This reduces
     889        time spent in op_mov during certain calls, like "a.b.c()".
     890       
     891        1%-2% speedup on v8, mostly richards and delta-blue.
     892
     893        * parser/Nodes.cpp:
     894        (JSC::FunctionCallDotNode::emitBytecode):
     895
     8962009-06-22  Gavin Barraclough  <[email protected]>
     897
     898        Reviewed by Sam Weinig.
     899
     900        Remove 'arguments' field from Register union.  Having JSCell derived types in the union is
     901        dangerous since it opens the possibility for the field to be written as a raw pointer but
      902        then read as a JSValue.  This will lead to stale data being read for the tag, which may
      903        be dangerous.  Having removed Arguments* types from Register, all arguments objects must
     904        always explicitly be stored in the register file as JSValues.
     905
     906        * interpreter/CallFrame.h:
     907        (JSC::ExecState::optionalCalleeArguments):
     908        * interpreter/Interpreter.cpp:
     909        (JSC::Interpreter::unwindCallFrame):
     910        (JSC::Interpreter::privateExecute):
     911        (JSC::Interpreter::retrieveArguments):
     912        * interpreter/Register.h:
     913        (JSC::Register::):
     914        * jit/JITStubs.cpp:
     915        (JSC::JITStubs::cti_op_tear_off_arguments):
     916        * runtime/Arguments.h:
     917        (JSC::JSActivation::copyRegisters):
     918        * runtime/JSActivation.cpp:
     919        (JSC::JSActivation::argumentsGetter):
     920        * runtime/JSActivation.h:
     921
     9222009-06-03  Sam Weinig  <[email protected]>
     923
     924        Reviewed by Geoffrey Garen.
     925
     926        Add back known this value optimization by abstracting
     927        slow case if not JSCell jumps.
     928
     929        * jit/JIT.h:
     930        * jit/JITCall.cpp:
     931        (JSC::JIT::compileOpCallVarargs):
     932        (JSC::JIT::compileOpCallVarargsSlowCase):
     933        (JSC::JIT::compileOpCall):
     934        (JSC::JIT::compileOpCallSlowCase):
     935        * jit/JITInlineMethods.h:
     936        (JSC::JIT::emitJumpSlowCaseIfNotJSCell):
     937        (JSC::JIT::linkSlowCaseIfNotJSCell):
     938        * jit/JITOpcodes.cpp:
     939        (JSC::JIT::emit_op_instanceof):
     940        (JSC::JIT::emitSlow_op_instanceof):
     941        * jit/JITPropertyAccess.cpp:
     942        (JSC::JIT::emit_op_get_by_val):
     943        (JSC::JIT::emitSlow_op_get_by_val):
     944        (JSC::JIT::emit_op_put_by_val):
     945        (JSC::JIT::emitSlow_op_put_by_val):
     946        (JSC::JIT::emit_op_get_by_id):
     947        (JSC::JIT::emitSlow_op_get_by_id):
     948        (JSC::JIT::emit_op_put_by_id):
     949        (JSC::JIT::emitSlow_op_put_by_id):
     950
     9512009-06-01  Geoffrey Garen  <[email protected]>
     952
     953        Reviewed by Sam Weinig.
     954       
     955        Fixed some of the regression in crypto-aes.js. (8.5% speedup in
     956        crypto-aes.js.)
     957       
     958        SunSpider reports no change overall.
     959       
     960        Division was producing double results, which took the slow path through
     961        array access code.
     962       
     963        Strangely, all my attempts at versions of this patch that modified array
     964        access code to accept ints encoded as doubles along the fast or slow paths
     965        were regressions. So I did this instead.
     966
     967        * jit/JITArithmetic.cpp:
     968        (JSC::JIT::emit_op_div): When dividing an int by an int, go ahead and try
     969        to turn the result into an int. Don't just do int division, though, because
     970        testing shows it to be slower than SSE double division, and the corner
     971        cases are pretty complicated / lengthy on top of that. Also, don't try
     972        to canonicalize division of known tiny numerators into ints, since that's a
     973        waste of time.
     974
     9752009-05-26  Geoffrey Garen  <[email protected]>
     976
     977        Reviewed by Oliver Hunt.
     978       
     979        Fixed a regression caused by my recent fix for NaN.
     980
     981        * jit/JITArithmetic.cpp:
     982        (JSC::JIT::emitBinaryDoubleOp): Actually do the comparison in reverse
     983        order, like the ChangeLog said we would, bokay?
     984
     9852009-05-26  Geoffrey Garen  <[email protected]>
     986
     987        Reviewed by Sam Weinig and Oliver Hunt.
     988       
     989        Fixed two edge cases in %:
     990       
     991        - Don't do -2147483648 % x as a fast case, since you might do -2147483648 % -1,
     992        which will signal a hardware exception due to overflow.
     993
     994        - In the case of a zero remainder, be sure to store negative zero if the
     995        dividend was zero.
     996       
     997        SunSpider reports no change.
     998
     999        * jit/JITArithmetic.cpp:
     1000        (JSC::JIT::emit_op_mod):
     1001        (JSC::JIT::emitSlow_op_mod):
     1002
     10032009-05-25  Geoffrey Garen  <[email protected]>
     1004
     1005        Reviewed by Maciej Stachowiak.
     1006       
     1007        Fixed a regression when comparing to NaN.
     1008
     1009        * jit/JITArithmetic.cpp:
     1010        (JSC::JIT::emitBinaryDoubleOp): For op_jnless and op_jnless_eq, do the
     1011        comparison in reverse order, and jump if the result is below or
     1012        below-or-equal. This ensures that we do jump in the case of NaN.
     1013
     10142009-05-25  Geoffrey Garen  <[email protected]>
     1015
     1016        Reviewed by Oliver Hunt.
     1017       
     1018        SunSpider says no change.
     1019       
     1020        Fixed regressions in fast/js/var-declarations-shadowing.html and
     1021        fast/js/equality.html, caused by recent == and != optimizations.
     1022
     1023        * jit/JITStubs.cpp:
     1024        (JSC::JITStubs::cti_op_eq): Don't treat "compare to string" as always
     1025        numeric or string comparison. If the second operand is an object, you
     1026        need to ToPrimitive it, and start all over again. Also, I wrote out each
     1027        of the possible cases explicitly, to cut down on redundant branching.
     1028
     10292009-05-25  Sam Weinig  <[email protected]>
     1030
     1031        Reviewed by Mark Rowe.
     1032
     1033        Fix bug in fast/js/constant-folding.html where we were not negating
     1034        -0 properly.
     1035
     1036        * jit/JITArithmetic.cpp:
     1037        (JSC::JIT::emit_op_negate):
     1038
     10392009-05-23  Geoffrey Garen  <[email protected]>
     1040
     1041        Reviewed by Oliver Hunt.
     1042       
     1043        Refactored new slow case codegen for == and !=.
     1044       
     1045        SunSpider reports no change, maybe a tiny speedup.
     1046
     1047        * jit/JITOpcodes.cpp:
     1048        (JSC::JIT::emitSlow_op_eq):
     1049        (JSC::JIT::emitSlow_op_neq): Made a vptr comparison a *Ptr operation,
     1050        instead of *32, to make it portable to 64bit. Reorganized the string
     1051        and generic cases to make their control flow a little clearer.
     1052
     10532009-05-23  Geoffrey Garen  <[email protected]>
     1054
     1055        Reviewed by Maciej Stachowiak.
     1056       
     1057        Optimized == and != for our new value representation -- especially for strings.
     1058       
     1059        14% speedup on date-format-tofte.
     1060
     1061        * jit/JITOpcodes.cpp:
     1062        (JSC::JIT::emit_op_eq):
     1063        (JSC::JIT::emitSlow_op_eq):
     1064        (JSC::JIT::emit_op_neq):
     1065        (JSC::JIT::emitSlow_op_neq):
     1066        * jit/JITStubCall.h:
     1067        (JSC::JITStubCall::JITStubCall):
     1068        * jit/JITStubs.cpp:
     1069        (JSC::JITStubs::cti_op_eq):
     1070        (JSC::JITStubs::cti_op_eq_strings):
     1071        (JSC::JITStubs::cti_op_call_eval):
     1072        * jit/JITStubs.h:
     1073        (JSC::):
     1074        * runtime/JSValue.h:
     1075
     10762009-05-22  Sam Weinig  <[email protected]>
     1077
     1078        Reviewed by Gavin Barraclough.
     1079
     1080        Fix non-SSE enabled builds.
     1081
     1082        * jit/JITArithmetic.cpp:
     1083        (JSC::JIT::emitSlow_op_add): Don't early return here, we still need to call the JIT stub.
     1084        (JSC::JIT::emitSlow_op_sub): Ditto.
     1085
     10862009-05-22  Geoffrey Garen  <[email protected]>
     1087
     1088        Reviewed by Sam Weinig.
     1089       
     1090        Here's a thought: let's not take a jit stub call just to multiply by 1,
     1091        bokay?
     1092       
     1093        imul doesn't set the zero flag, so to test for a zero result, we need
     1094        an explicit instruction. (Luckily, it does set the overflow flag, so
     1095        we can still use that.)
     1096
     1097        * jit/JIT.h:
     1098        * jit/JITArithmetic.cpp:
     1099        (JSC::JIT::emit_op_mul):
     1100        (JSC::JIT::emitSlow_op_mul):
     1101        (JSC::JIT::emitMul32InPlace):
     1102
     11032009-05-22  Sam Weinig  <[email protected]>
     1104
     1105        Reviewed by Geoffrey "Premature Commit" Garen.
     1106
     1107        Add back constant integer cases for op_add.
     1108
     1109        * jit/JIT.h:
     1110        * jit/JITArithmetic.cpp:
     1111        (JSC::JIT::emit_op_add):
     1112        (JSC::JIT::emitAdd32Constant):
     1113        (JSC::JIT::emitSlow_op_add):
     1114        (JSC::JIT::emitSlowAdd32Constant):
     1115        * jit/JITInlineMethods.h:
     1116        (JSC::JIT::getConstantOperandImmediateDouble):
     1117        (JSC::JIT::isOperandConstantImmediateDouble):
     1118
     11192009-05-22  Geoffrey Garen  <[email protected]>
     1120
     1121        Reviewed by Sam Weinig.
     1122       
     1123        Added fast double cases for op_jnless and op_jnlesseq.
     1124
     1125        * assembler/AbstractMacroAssembler.h:
      1126        (JSC::AbstractMacroAssembler::JumpList::jumps): New accessor, used by
     1127        addSlowCase.
     1128
     1129        * assembler/X86Assembler.h:
     1130        (JSC::X86Assembler::ucomisd_rm): New method for comparing register to
     1131        memory.
     1132
     1133        * jit/JIT.h:
     1134        * jit/JITArithmetic.cpp:
     1135        (JSC::JIT::emit_op_jnless):
     1136        (JSC::JIT::emitSlow_op_jnless):
     1137        (JSC::JIT::emit_op_jnlesseq):
     1138        (JSC::JIT::emitSlow_op_jnlesseq):
     1139        (JSC::JIT::emit_op_add):
     1140        (JSC::JIT::emit_op_sub):
     1141        (JSC::JIT::emitBinaryDoubleOp):
     1142        (JSC::JIT::emit_op_mul):
     1143        (JSC::JIT::emit_op_div): Modified emitBinaryDoubleOp to accept comparison/jump
     1144        operations in addition to operations with explicit result registers.
     1145
     1146        * jit/JITInlineMethods.h:
     1147        (JSC::JIT::addSlowCase): Added an "addSlowCase" for JumpLists, so clients
     1148        can track multiple jumps to the same slow case condition together.
     1149
     11502009-05-21  Sam Weinig  <[email protected]>
     1151
     1152        Reviewed by Gavin Barraclough.
     1153
     1154        Implement op_negate inline fast cases.
     1155
     1156        * assembler/MacroAssemblerX86Common.h:
     1157        (JSC::MacroAssemblerX86Common::neg32):
     1158        * assembler/X86Assembler.h:
     1159        (JSC::X86Assembler::):
     1160        (JSC::X86Assembler::negl_m):
     1161        (JSC::X86Assembler::xorpd_rr):
     1162        * jit/JIT.cpp:
     1163        (JSC::JIT::privateCompileMainPass):
     1164        (JSC::JIT::privateCompileSlowCases):
     1165        * jit/JIT.h:
     1166        * jit/JITArithmetic.cpp:
     1167        (JSC::JIT::emit_op_negate):
     1168        (JSC::JIT::emitSlow_op_negate):
     1169
     11702009-05-20  Sam Weinig  <[email protected]>
     1171
     1172        Reviewed by Gavin Barraclough.
     1173
     1174        Update the patchOffsetGetByIdSlowCaseCall constant for the
     1175        case that OPCODE_SAMPLING is enabled.
     1176
     1177        * jit/JIT.h:
     1178
     11792009-05-20  Geoffrey Garen  <[email protected]>
     1180
     1181        Reviewed by Sam Weinig.
     1182
     1183        Added support for inline subtraction of doubles.
     1184
     1185        * jit/JITArithmetic.cpp:
     1186        (JSC::JIT::emit_op_sub):
     1187        (JSC::JIT::emitSlow_op_sub):
     1188        (JSC::JIT::emitSlowSub32InPlaceLeft):
     1189        (JSC::JIT::emitBinaryDoubleOp):
     1190
     11912009-05-20  Sam Weinig  <[email protected]>
     1192
     1193        Reviewed by Geoffrey Garen.
     1194
     1195        Added support for inline division.
     1196
     1197        * assembler/X86Assembler.h:
     1198        (JSC::X86Assembler::):
     1199        (JSC::X86Assembler::divsd_rr):
     1200        (JSC::X86Assembler::divsd_mr):
     1201        * bytecode/CodeBlock.cpp:
     1202        (JSC::CodeBlock::dump):
     1203        * bytecode/Opcode.h:
     1204        * bytecompiler/BytecodeGenerator.cpp:
     1205        (JSC::BytecodeGenerator::emitBinaryOp):
     1206        * interpreter/Interpreter.cpp:
     1207        (JSC::Interpreter::privateExecute):
     1208        * jit/JIT.cpp:
     1209        (JSC::JIT::privateCompileMainPass):
     1210        (JSC::JIT::privateCompileSlowCases):
     1211        * jit/JIT.h:
     1212        * jit/JITArithmetic.cpp:
     1213        (JSC::JIT::emitBinaryDoubleOp):
     1214        (JSC::JIT::emit_op_div):
     1215        (JSC::JIT::emitSlow_op_div):
     1216
     12172009-05-20  Geoffrey Garen  <[email protected]>
     1218
     1219        Reviewed by Sam Weinig.
     1220
     1221        Added support for inline addition of doubles.
     1222
     1223        * jit/JITArithmetic.cpp:
     1224        (JSC::JIT::emit_op_add):
     1225        (JSC::JIT::emitSlow_op_add):
     1226        (JSC::JIT::emitSlowAdd32InPlace):
     1227        (JSC::JIT::emitBinaryDoubleOp):
     1228        (JSC::JIT::emit_op_mul):
     1229        (JSC::JIT::emitSlow_op_mul):
     1230
     12312009-05-20  Geoffrey Garen  <[email protected]>
     1232
     1233        Reviewed by Sam Weinig.
     1234       
     1235        Factored inline double operations into a helper function, so that we
     1236        can reuse this code for other math operations.
     1237
     1238        * jit/JIT.h:
     1239        * jit/JITArithmetic.cpp:
     1240        (JSC::JIT::emitBinaryDoubleOp):
     1241        (JSC::JIT::emit_op_mul):
     1242        * jit/JITCall.cpp:
     1243        (JSC::JIT::compileOpCallInitializeCallFrame):
     1244
     12452009-05-20  Geoffrey Garen  <[email protected]>
     1246
     1247        Reviewed by Sam Weinig.
     1248       
     1249        Added support for inline multiplication of doubles.
     1250
     1251        * assembler/X86Assembler.h:
     1252        (JSC::X86Assembler::cvtsi2sd_mr): New function, useful for loading an
     1253        int32 into a double register.
     1254
     1255        * jit/JITArithmetic.cpp:
     1256        (JSC::JIT::emit_op_mul):
     1257        (JSC::JIT::emitSlow_op_mul): Filled out these cases for double arithmetic.
     1258
     1259        * jit/JIT.h:
     1260        * jit/JITInlineMethods.h:
     1261        (JSC::JIT::addressFor): New function, useful for addressing a JSValue's
     1262        full 64bits as a double.
     1263
     12642009-05-19  Sam Weinig  <[email protected]>
     1265
     1266        Reviewed by Geoffrey Garen.
     1267
     1268        Implement and enable optimized calls.
     1269
     1270        * jit/JIT.cpp:
     1271        (JSC::JIT::privateCompileCTIMachineTrampolines): Add ENABLE(JIT_OPTIMIZE_CALL) guards
      1272        around the optimize call only trampolines (virtualCallPreLink and virtualCallLink).
     1273        Update the trampolines to account for the new JSValue representation.
     1274        (JSC::JIT::unlinkCall): Use NULL instead of JSValue noValue.
     1275
     1276        * jit/JITCall.cpp:
     1277        (JSC::JIT::compileOpCall): Update to account for the new JSValue representation
     1278        (JSC::JIT::compileOpCallSlowCase): Ditto.
     1279
     1280        * jit/JITStubs.h: Remove incorrect !ENABLE(JIT_OPTIMIZE_CALL) guard.
     1281
     1282        * wtf/Platform.h: Enable ENABLE_JIT_OPTIMIZE_CALL.
     1283
     12842009-05-19  Sam Weinig  <[email protected]>
     1285
     1286        Reviewed by Geoffrey Garen.
     1287
     1288        Implement and enable optimized property access.
     1289
     1290        * assembler/AbstractMacroAssembler.h: Fix comment.
     1291        * jit/JIT.cpp:
     1292        (JSC::JIT::privateCompileCTIMachineTrampolines): Remove array length trampoline
     1293        and implement the string length trampoline.
     1294        * jit/JIT.h: Add new constants for patch offsets.
     1295        * jit/JITInlineMethods.h: Remove FIELD_OFFSET which is now in StdLibExtras.h.
     1296        * jit/JITPropertyAccess.cpp:
     1297        (JSC::JIT::emit_op_get_by_id):
     1298        (JSC::JIT::emitSlow_op_get_by_id):
     1299        (JSC::JIT::emit_op_put_by_id):
     1300        (JSC::JIT::emitSlow_op_put_by_id):
     1301        (JSC::JIT::compilePutDirectOffset):
     1302        (JSC::JIT::compileGetDirectOffset):
     1303        (JSC::JIT::privateCompilePutByIdTransition):
     1304        (JSC::JIT::patchGetByIdSelf):
     1305        (JSC::JIT::patchPutByIdReplace):
     1306        (JSC::JIT::privateCompilePatchGetArrayLength):
     1307        (JSC::JIT::privateCompileGetByIdProto):
     1308        (JSC::JIT::privateCompileGetByIdSelfList):
     1309        (JSC::JIT::privateCompileGetByIdProtoList):
     1310        (JSC::JIT::privateCompileGetByIdChainList):
     1311        (JSC::JIT::privateCompileGetByIdChain):
     1312        * jit/JITStubCall.h:
     1313        (JSC::JITStubCall::addArgument): Add version of addArgument that takes
     1314        two registers for the tag and payload.
     1315        * jit/JITStubs.cpp:
     1316        (JSC::JITStubs::JITStubs): Remove array length trampoline pointer.
     1317        (JSC::JITStubs::cti_op_get_by_id_self_fail):
     1318        * jit/JITStubs.h:
     1319        * runtime/JSObject.h:
     1320        (JSC::JSObject::JSObject): Move m_inheritorID below the property storage
     1321        to align it to a 16 byte boundary.
     1322        * wtf/Platform.h: Enable ENABLE_JIT_OPTIMIZE_PROPERTY_ACCESS
     1323        * wtf/StdLibExtras.h: Move FIELD_OFFSET here.
     1324
     13252009-05-17  Sam Weinig  <[email protected]>
     1326
     1327        Reviewed by Geoffrey Garen.
     1328
     1329        Remove unneeded ExecState parameter from the number JSValue constructors.
     1330
     1331        * runtime/JSValue.h:
     1332        (JSC::jsNumber):
     1333        (JSC::jsNaN):
     1334        (JSC::JSValue::JSValue):
     1335
     13362009-05-15  Sam Weinig  <[email protected]>
     1337
     1338        Reviewed by Geoffrey Garen.
     1339
     1340        Implemented fast path for op_put_by_val when putting to arrays.
     1341
     1342        * jit/JITPropertyAccess.cpp:
     1343        (JSC::JIT::emit_op_put_by_val):
     1344        (JSC::JIT::emitSlow_op_put_by_val):
     1345
     13462009-05-15  Geoffrey Garen  <[email protected]> (Mostly by Sam)
     1347
     1348        Reviewed by Sam Weinig.
     1349       
     1350        Implemented fast path for op_get_by_val when accessing array.
     1351
     1352        * jit/JIT.cpp:
     1353        * jit/JITPropertyAccess.cpp:
     1354        (JSC::JIT::emit_op_get_by_val):
     1355        (JSC::JIT::emitSlow_op_get_by_val):
     1356
     13572009-05-14  Geoffrey Garen  <[email protected]>
     1358
     1359        Reviewed by Sam Weinig.
     1360       
     1361        Fixed a failure in fast/js/math-transforms.html caused by failing to
     1362        preserve -0 in multiplication.
     1363
     1364        * assembler/X86Assembler.h:
     1365        (JSC::X86Assembler::jz):
     1366        * jit/JITArithmetic.cpp:
     1367        (JSC::JIT::emit_op_mul):
     1368        (JSC::JIT::emitSlow_op_mul):
     1369        (JSC::JIT::emitMul32Constant):
     1370        (JSC::JIT::emitMul32InPlace): Check both for overflow and for zero when
     1371        doing multiplication. Use a slow case to get these right.
     1372
     13732009-05-14  Geoffrey Garen  <[email protected]>
     1374
     1375        Reviewed by Sam Weinig.
     1376       
     1377        Fixed a bug in the varargs calling convention.
     1378
     1379        * jit/JITCall.cpp:
     1380        (JSC::JIT::compileOpCallVarargs): Move the argument count into regT1,
     1381        since that's where ctiVirtualCall expects it to be.
     1382
     13832009-05-14  Geoffrey Garen  <[email protected]>
     1384
     1385        Reviewed by Sam Weinig.
     1386
     1387        Fixed a small bug in instanceof's looping code.
     1388
     1389        * jit/JITOpcodes.cpp:
     1390        (JSC::JIT::emit_op_instanceof): NULL means the object has no prototype,
     1391        so only loop when *not* equal to NULL.
     1392
     13932009-05-14  Geoffrey Garen  <[email protected]>
     1394
     1395        Reviewed by Sam Weinig.
     1396       
     1397        Fixed a small bug in instanceof's result writing code.
     1398
     1399        * jit/JITOpcodes.cpp:
     1400        (JSC::JIT::emit_op_instanceof): Make sure to fill out the payload bits
     1401        in all cases.
     1402
     14032009-05-14  Sam Weinig  <[email protected]>
     1404
     1405        Reviewed by Geoffrey Garen.
     1406
     1407        Removed an invalid assertion in cti_op_urshift which
     1408        depended on a fast path for op_urshift which has
     1409        never existed.
     1410
     1411        * jit/JITStubs.cpp:
     1412        (JSC::JITStubs::cti_op_urshift):
     1413
     14142009-05-14  Geoffrey Garen  <[email protected]>
     1415
     1416        Reviewed by Sam Weinig.
     1417       
     1418        Fixed loop_if_true, which had the same reversed test that jtrue had.
     1419
     1420        * jit/JITOpcodes.cpp:
     1421        (JSC::JIT::emit_op_loop_if_true):
     1422
     14232009-05-14  Sam Weinig  <[email protected]>
     1424
     1425        Reviewed by Geoffrey Garen.
     1426
     1427        In op_neq, we apparently want to check that one value
     1428        does *not* equal another.  Go figure.
     1429
     1430        * jit/JITOpcodes.cpp:
     1431        (JSC::JIT::emit_op_neq):
     1432
     14332009-05-14  Sam Weinig  <[email protected]>
     1434
     1435        Reviewed by Geoffrey Garen.
     1436
     1437        The slow case of op_mod should call op_mod's jit stub,
     1438        not op_mul.  That would be dumb.
     1439
     1440        * jit/JITArithmetic.cpp:
     1441        (JSC::JIT::emitSlow_op_mod):
     1442
     14432009-05-14  Geoffrey Garen  <[email protected]>
     1444
     1445        Reviewed by Sam Weinig.
     1446       
     1447        Fixed problems when using 'arguments' due to a half-initialized register.
     1448
     1449        * interpreter/CallFrame.h:
     1450        (JSC::ExecState::setCalleeArguments):
     1451        (JSC::ExecState::init): Require a full JSValue when setting up the
     1452        'arguments' virtual register, since this register is accessible from JIT
     1453        code and bytecode, and needs to be a true JSValue.
     1454
     1455        * interpreter/CallFrameClosure.h:
     1456        (JSC::CallFrameClosure::resetCallFrame): ditto
     1457
     1458        * interpreter/Interpreter.cpp:
     1459        (JSC::Interpreter::privateExecute): ditto
     1460
     1461        * interpreter/Register.h: Removed the constructor that allowed assignment
     1462        of a JSArguments* to a register. That is not safe. See above.
     1463
     1464        * jit/JITStubs.cpp:
     1465        (JSC::JITStubs::cti_op_create_arguments):
     1466        (JSC::JITStubs::cti_op_create_arguments_no_params): ditto
     1467
     14682009-05-14  Sam Weinig  <[email protected]>
     1469
     1470        Reviewed by Geoffrey Garen.
     1471
     1472        We really want to go to the slow case in op_jfalse and
     1473        op_jtrue if the value is *not* boolean.
     1474
     1475        * jit/JITOpcodes.cpp:
     1476        (JSC::JIT::emit_op_jfalse):
     1477        (JSC::JIT::emit_op_jtrue):
     1478
     14792009-05-14  Sam Weinig  <[email protected]>
     1480
     1481        Reviewed by Geoffrey Garen.
     1482
      1483        Flipped the condition when emitting an op_loop_if_less or op_loop_if_lesseq
     1484        if the first operand is a constant.
     1485
     1486        * jit/JITOpcodes.cpp:
     1487        (JSC::JIT::emit_op_loop_if_less):
     1488        (JSC::JIT::emit_op_loop_if_lesseq):
     1489
     14902009-05-14  Sam Weinig  <[email protected]>
     1491
     1492        Reviewed by Geoffrey Garen.
     1493
     1494        Added missing return in op_jnless and op_jnlesseq.
     1495
     1496        * jit/JITArithmetic.cpp:
     1497        (JSC::JIT::emit_op_jnless):
     1498        (JSC::JIT::emit_op_jnlesseq):
     1499
     15002009-05-14  Sam Weinig  <[email protected]>
     1501
     1502        Reviewed by Geoffrey Garen.
     1503
      1504        Load constants into the register file as a temporary measure to
      1505        aid bring-up.  This allows us to treat constants like any
      1506        other virtual register.
     1507
     1508        * jit/JITOpcodes.cpp:
     1509        (JSC::JIT::emit_op_enter):
     1510        (JSC::JIT::emit_op_enter_with_activation):
     1511
     15122009-05-14  Geoffrey Garen  <[email protected]>
     1513
     1514        Reviewed by Sam Weinig.
     1515       
     1516        Implemented op_strict_eq. Original patch by Snowy, by way of Sam and Gavin.
     1517
     1518        * assembler/MacroAssemblerX86Common.h:
     1519        (JSC::MacroAssemblerX86Common::set8): Added set8, since it's slightly
     1520        faster than set32, and the new value representation usually doesn't
     1521        need set32.
     1522
     1523        * jit/JIT.cpp:
     1524        * jit/JIT.h:
     1525        * jit/JITInlineMethods.h:
     1526        (JSC::JIT::emitLoadTag):
     1527        (JSC::JIT::emitLoadPayload): Added helper functions for dealing with
     1528        constants. Eventually, we should write special cases for all constants,
     1529        but these are helpful in the short term.
     1530
     1531        * jit/JITOpcodes.cpp:
     1532        (JSC::JIT::compileOpStrictEq):
     1533        (JSC::JIT::emitSlow_op_stricteq):
      1534        (JSC::JIT::emitSlow_op_nstricteq): the opcodes.
     1535
     1536        * runtime/JSValue.h:
     1537        (JSC::JSValue::):
     1538        (JSC::JSValue::isDouble): Added a LowestTag for clarity.
     1539
     15402009-05-13  Geoffrey Garen  <[email protected]>
     1541
     1542        Reviewed by Sam Weinig.
     1543       
     1544        Fixed some bugs in host function calls.
     1545       
     1546        testapi now passes!
     1547
     1548        * jit/JIT.cpp: Changed some registers around to avoid overwriting edx:eax,
     1549        which is how JSValues are now returned. Also changed the code that
     1550        passes thisValue to pass the full 64bits of the value. Also added
     1551        an #error compiler directive to other platform builds, since the JSValue
     1552        return signature probably won't return in edx:eax on those platforms,
     1553        and we'll have to investigate a solution.
     1554
     15552009-05-13  Geoffrey Garen  <[email protected]>
     1556
     1557        Reviewed by Sam Weinig.
     1558       
     1559        Removed parameters from functions that are intended never to use their
     1560        parameters.
     1561
     1562        * jit/JITPropertyAccess.cpp:
     1563        (JSC::JIT::emitSlow_op_get_by_val):
     1564        (JSC::JIT::emitSlow_op_put_by_val):
     1565
     15662009-05-13  Geoffrey Garen  <[email protected]>
     1567
     1568        Reviewed by Sam Weinig.
     1569       
     1570        Ported op_instance_of from TOT. It's basically the same, but some register
     1571        stuff changed to memory stuff.
     1572
     1573        * jit/JITInlineMethods.h:
     1574        (JSC::JIT::emitPutJITStubArgFromVirtualRegister):
     1575        (JSC::JIT::emitStore): Changed to use helper functions.
     1576
     1577        * jit/JITOpcodes.cpp:
     1578        (JSC::JIT::emit_op_instanceof):
     1579        (JSC::JIT::emitSlow_op_instanceof): Ported from TOT.
     1580
     15812009-05-13  Geoffrey Garen  <[email protected]>
     1582
     1583        Reviewed by Gavin Barraclough.
     1584       
      1585        Added a comment to explain an exception-handling subtlety that we found
     1586        hard to remember when reviewing my last patch.
     1587
     1588        * jit/JITOpcodes.cpp:
     1589        (JSC::JIT::emit_op_catch):
     1590
     15912009-05-13  Geoffrey Garen  <[email protected]>
     1592
     1593        Reviewed by Sam Weinig.
     1594       
     1595        Implemented try/catch.
     1596
     1597        * jit/JITOpcodes.cpp:
     1598        (JSC::JIT::emit_op_throw): Updated to use JITStackFrame abstraction.
     1599        (JSC::JIT::emit_op_catch): Filled out.
     1600
     16012009-05-13  Sam Weinig  <[email protected]>
     1602
     1603        Reviewed by Geoffrey Garen.
     1604
     1605        Implemented op_loop_if_true, op_jfalse, op_jtrue, op_jeq_null and op_jneq_null
     1606
     1607        * jit/JITOpcodes.cpp:
     1608        (JSC::JIT::emitSlow_op_instanceof): Moved from below to be next to its
     1609        fast brother.
     1610
     1611        (JSC::JIT::emit_op_loop_if_true): Similar to the old version
     1612        in that it tries to do the integer case first and reduce the
     1613        number of jumps you might need to take.
     1614        (JSC::JIT::emitSlow_op_loop_if_true):
     1615
     1616        (JSC::JIT::emit_op_jfalse): Very similar to op_loop_if_true, only
     1617        the inverse and without a timeout check.
     1618        (JSC::JIT::emitSlow_op_jfalse):
     1619
     1620        (JSC::JIT::emit_op_jtrue): Very similar to op_loop_if_true except
     1621        without the timeout check.
     1622        (JSC::JIT::emitSlow_op_jtrue):
     1623
     1624        (JSC::JIT::emit_op_jeq_null): Very similar to the implementation
     1625        of op_eq, except it takes jumps instead of copying the condition
     1626        to a dst.
     1627        (JSC::JIT::emit_op_jneq_null): Ditto but for op_neq.
     1628
     16292009-05-13  Geoffrey Garen  <[email protected]>
     1630
     1631        Reviewed by Sam Weinig.
     1632       
     1633        Implemented op_call_varargs.
     1634
     1635        * jit/JITCall.cpp:
     1636        (JSC::JIT::compileOpCallVarargsSetupArgs):
     1637        (JSC::JIT::compileOpCallVarargs):
     1638        (JSC::JIT::emit_op_call):
     1639        (JSC::JIT::emit_op_call_eval):
     1640        (JSC::JIT::emit_op_load_varargs):
     1641        (JSC::JIT::emit_op_call_varargs):
     1642        (JSC::JIT::emit_op_construct):
     1643        * jit/JITOpcodes.cpp:
     1644        (JSC::JIT::emit_op_jneq_ptr):
     1645
     16462009-05-13  Geoffrey Garen  <[email protected]>
     1647
     1648        Reviewed by Sam Weinig.
     1649       
     1650        Implemented op_call_eval.
     1651
     1652        * jit/JITCall.cpp:
     1653        (JSC::JIT::compileOpCallVarargsSetupArgs):
     1654        (JSC::JIT::compileOpCall):
     1655        * jit/JITStubCall.h:
     1656        (JSC::CallEvalJITStub::CallEvalJITStub):
     1657
     16582009-05-13  Sam Weinig  <[email protected]>
     1659
     1660        Reviewed by Gavin Barraclough.
     1661
     1662        Implemented op_not. (Gavin did most of the work!)
     1663
     1664        * jit/JITOpcodes.cpp:
     1665        (JSC::JIT::emit_op_not):
     1666        (JSC::JIT::emitSlow_op_not):
     1667
     16682009-05-13  Geoffrey Garen  <[email protected]>
     1669
     1670        Reviewed by Sam Weinig.
     1671       
     1672        Implemented op_global_resolve.
     1673
     1674        * jit/JITOpcodes.cpp:
     1675        (JSC::JIT::emit_op_loop_if_less):
     1676        (JSC::JIT::emit_op_loop_if_lesseq): Added back accidentally removed
     1677        early returns.
     1678
     1679        (JSC::JIT::emit_op_resolve_global):
     1680        * jit/JITStubs.cpp:
     1681        (JSC::JITStubs::cti_op_resolve_global): Pretty similar to the old code,
     1682        but we need two reads and a TimesEight step in order to account for the
     1683        64bit value size.
     1684
     1685        * jit/JITStubs.h:
     1686        (JSC::): Slightly tweaked this code to specialize for a JSGlobalObject*,
     1687        to avoid having to pass an irrelevant tag pointer to the stub.
     1688
     16892009-05-13  Sam Weinig  <[email protected]>
     1690
     1691        Reviewed by Geoffrey Garen.
     1692
     1693        Implemented op_to_jsnumber.
     1694
     1695        * jit/JITOpcodes.cpp:
     1696        (JSC::JIT::emit_op_to_jsnumber):
     1697        (JSC::JIT::emitSlow_op_to_jsnumber):
     1698
     16992009-05-13  Sam Weinig  <[email protected]>
     1700
     1701        Reviewed by Geoffrey Garen.
     1702
     1703        Implemented op_convert_this.
     1704
     1705        * jit/JITOpcodes.cpp:
     1706        (JSC::JIT::emit_op_convert_this):
     1707        (JSC::JIT::emitSlow_op_convert_this):
     1708
     17092009-05-13  Geoffrey Garen  <[email protected]>
     1710
     1711        Reviewed by Sam Weinig.
     1712       
     1713        Got basic JS function and constructor calls working.
     1714
     1715        * jit/JIT.cpp:
     1716        (JSC::JIT::privateCompileCTIMachineTrampolines):
     1717        * jit/JIT.h:
     1718        * jit/JITCall.cpp:
     1719        (JSC::JIT::compileOpCallSetupArgs):
     1720        (JSC::JIT::compileOpCallVarargsSetupArgs):
     1721        (JSC::JIT::compileOpConstructSetupArgs):
     1722        (JSC::JIT::emit_op_ret):
     1723        (JSC::JIT::emit_op_construct_verify):
     1724        (JSC::JIT::emitSlow_op_construct_verify):
     1725        (JSC::JIT::emitSlow_op_call):
     1726        (JSC::JIT::emitSlow_op_call_eval):
     1727        (JSC::JIT::emitSlow_op_call_varargs):
     1728        (JSC::JIT::emitSlow_op_construct):
     1729        (JSC::JIT::compileOpCall): Filled out these cases, with call_eval #if'd out.
     1730
     1731        * jit/JITInlineMethods.h:
     1732        (JSC::JIT::emitPutJITStubArgFromVirtualRegister):
     1733        (JSC::JIT::emitLoad): Restored some legacy "*CTIArg*" functions,
     1734        since I wanted to avoid the complexity of revamping the API here while
     1735        trying to bring it up. Eventually, we should re-remove all of these functions.
     1736
     1737        (JSC::JIT::recordJumpTarget): Removed unnecessary macro cruft. You will
     1738        not silence me, Sam Weinig! The world will know that you are a crufty,
     1739        crufty, crufty programmer!!!
     1740
     1741        * jit/JITOpcodes.cpp:
     1742        * jit/JITStubs.cpp:
     1743        (JSC::):
     1744        * jit/JITStubs.h: Changed up some offsets in the JITStackFrame class, since
      1745        an off-by-one error was causing stack misalignment.
     1746
     17472009-05-13  Sam Weinig  <[email protected]>
     1748
     1749        Reviewed by Geoffrey Garen.
     1750
     1751        Implement op_eq_null and op_neq_null.
     1752
     1753        * assembler/MacroAssemblerX86Common.h:
     1754        (JSC::MacroAssemblerX86Common::set8):
     1755        (JSC::MacroAssemblerX86Common::setTest8):
     1756        * jit/JITOpcodes.cpp:
     1757        (JSC::JIT::emit_op_stricteq):
     1758        (JSC::JIT::emitSlow_op_stricteq):
     1759        (JSC::JIT::emit_op_nstricteq):
     1760        (JSC::JIT::emitSlow_op_nstricteq):
     1761        (JSC::JIT::emit_op_eq_null):
     1762        (JSC::JIT::emit_op_neq_null):
     1763        * jsc.cpp:
     1764
     17652009-05-12  Sam Weinig  <[email protected]>
     1766
     1767        Reviewed by Geoffrey Garen.
     1768
     1769        Implement op_new_error.
     1770
     1771        * jit/JITOpcodes.cpp:
     1772        (JSC::JIT::emit_op_new_error):
     1773        * jit/JITStubCall.h:
     1774        (JSC::JITStubCall::addArgument): Add a version of addArgument
     1775        that takes a constant JSValue.
     1776
     17772009-05-12  Sam Weinig  <[email protected]>
     1778
     1779        Reviewed by Geoffrey Garen.
     1780
     1781        Remove now unused emitGetVariableObjectRegister and emitPutVariableObjectRegister.
     1782
     1783        * jit/JIT.cpp:
     1784        * jit/JIT.h:
     1785
     17862009-05-12  Sam Weinig  <[email protected]>
     1787
     1788        Reviewed by Geoffrey Garen.
     1789
     1790        Implement op_to_primitive and op_next_pname.
     1791
     1792        * jit/JITOpcodes.cpp:
     1793        (JSC::JIT::emitSlow_op_construct_verify):
     1794        (JSC::JIT::emit_op_to_primitive):
     1795        (JSC::JIT::emitSlow_op_to_primitive):
     1796        (JSC::JIT::emitSlow_op_loop_if_true):
     1797        (JSC::JIT::emit_op_jtrue):
     1798        (JSC::JIT::emit_op_next_pname):
     1799
     18002009-05-12  Sam Weinig  <[email protected]>
     1801
     1802        Reviewed by Geoffrey Garen.
     1803
     1804        Add op_get_global_var, op_put_global_var, emit_op_get_scoped_var, emit_op_put_scoped_var and
     1805        op_unexpected_load.
     1806
     1807        * jit/JIT.h:
     1808        * jit/JITInlineMethods.h:
     1809        (JSC::JIT::tagFor):
     1810        (JSC::JIT::payloadFor):
     1811        (JSC::JIT::emitLoad):
     1812        (JSC::JIT::emitStore):
     1813        (JSC::JIT::emitLoadReturnValue):
     1814        * jit/JITOpcodes.cpp:
     1815        (JSC::JIT::emit_op_get_global_var):
     1816        (JSC::JIT::emit_op_put_global_var):
     1817        (JSC::JIT::emit_op_get_scoped_var):
     1818        (JSC::JIT::emit_op_put_scoped_var):
     1819        (JSC::JIT::emit_op_unexpected_load):
     1820
     18212009-05-12  Geoffrey Garen  <[email protected]>
     1822
     1823        Reviewed by Sam Weinig.
     1824
     1825        Added overflow handling to op_sub.
     1826
     1827        * jit/JIT.h:
     1828        * jit/JITArithmetic.cpp:
     1829        (JSC::JIT::emitSlow_op_sub):
     1830        (JSC::JIT::emitSlowSub32InPlaceLeft):
     1831
     18322009-05-12  Sam Weinig  <[email protected]>
     1833
     1834        Reviewed by Geoffrey Garen.
     1835
     1836        Remove a function call by folding op_get_by_id and op_put_by_id into
     1837        their respective compile functions.
     1838
     1839        * jit/JIT.h:
     1840        * jit/JITPropertyAccess.cpp:
     1841        (JSC::JIT::emit_op_get_by_id):
     1842        (JSC::JIT::emitSlow_op_get_by_id):
     1843        (JSC::JIT::emit_op_put_by_id):
     1844        (JSC::JIT::emitSlow_op_put_by_id):
     1845
     18462009-05-12  Sam Weinig  <[email protected]>
     1847
     1848        Reviewed by Geoffrey Garen.
     1849
     1850        Make JITStubCall work in 64bit by making the stack index
     1851        step dependent on the size of void*.
     1852
     1853        * jit/JITStubCall.h:
     1854        (JSC::JITStubCall::JITStubCall):
     1855        (JSC::JITStubCall::addArgument):
     1856
     18572009-05-12  Sam Weinig  <[email protected]>
     1858
     1859        Reviewed by Geoffrey Garen.
     1860
     1861        Implement simple version of property access opcodes
     1862        which just call a stub functions.
     1863
     1864        * jit/JITOpcodes.cpp:
     1865        * jit/JITPropertyAccess.cpp:
     1866        (JSC::JIT::emitSlow_op_put_by_id):
     1867        (JSC::JIT::emitSlow_op_get_by_id):
     1868        (JSC::JIT::emit_op_get_by_val):
     1869        (JSC::JIT::emitSlow_op_get_by_val):
     1870        (JSC::JIT::emit_op_put_by_val):
     1871        (JSC::JIT::emitSlow_op_put_by_val):
     1872        (JSC::JIT::emit_op_put_by_index):
     1873        (JSC::JIT::emit_op_put_getter):
     1874        (JSC::JIT::emit_op_put_setter):
     1875        (JSC::JIT::emit_op_del_by_id):
     1876        (JSC::JIT::compileGetByIdHotPath):
     1877        (JSC::JIT::compilePutByIdHotPath):
     1878        * jit/JITStubCall.h:
     1879        (JSC::JITStubCall::addArgument):
     1880        * jsc.cpp:
     1881
     18822009-05-12  Geoffrey Garen  <[email protected]>
     1883
     1884        Reviewed by Sam Weinig.
     1885       
     1886        Added work-around for XCode debugging echo problem.
     1887
     1888        * jsc.cpp:
     1889        (runInteractive):
     1890
     18912009-05-12  Geoffrey Garen  <[email protected]>
     1892
     1893        Reviewed by Sam Weinig.
     1894       
     1895        Added overflow handling to op_add.
     1896
     1897        * jit/JIT.h:
     1898        * jit/JITArithmetic.cpp:
     1899        (JSC::JIT::emitSlow_op_add):
     1900        (JSC::JIT::emitSlowAdd32InPlace):
     1901
     19022009-05-12  Sam Weinig  <[email protected]>
     1903
     1904        Reviewed by Geoffrey Garen.
     1905
      1906        Add slow cases for op_jnless and emit_op_jnlesseq.
     1907
     1908        * jit/JITArithmetic.cpp:
     1909        (JSC::JIT::emitSlow_op_jnless):
     1910        (JSC::JIT::emitSlow_op_jnlesseq):
     1911
     19122009-05-12  Sam Weinig  <[email protected]>
     1913
     1914        Reviewed by Geoffrey Garen.
     1915
     1916        Add implementations for op_jnless, emit_op_jnlesseq, op_loop_if_less and op_loop_if_lesseq.
     1917        No slow cases for op_jnless or emit_op_jnlesseq yet.
     1918
     1919        * jit/JITArithmetic.cpp:
     1920        (JSC::JIT::emit_op_jnless):
     1921        (JSC::JIT::emitSlow_op_jnless):
     1922        (JSC::JIT::emit_op_jnlesseq):
     1923        (JSC::JIT::emitSlow_op_jnlesseq):
     1924        * jit/JITOpcodes.cpp:
     1925        (JSC::JIT::emit_op_loop_if_less):
     1926        (JSC::JIT::emitSlow_op_loop_if_less):
     1927        (JSC::JIT::emit_op_loop_if_lesseq):
     1928        (JSC::JIT::emitSlow_op_loop_if_lesseq):
     1929
     19302009-05-12  Sam Weinig  <[email protected]>
     1931
     1932        Reviewed by Geoffrey Garen.
     1933
     1934        Turn the RECORD_JUMP_TARGET macro into an inline function.
     1935
     1936        * jit/JIT.h:
     1937        * jit/JITInlineMethods.h:
     1938        (JSC::JIT::recordJumpTarget):
     1939        * jit/JITOpcodes.cpp:
     1940        (JSC::JIT::emit_op_jmp):
     1941        (JSC::JIT::emit_op_jsr):
     1942        (JSC::JIT::emit_op_jmp_scopes):
     1943
     19442009-05-12  Sam Weinig  <[email protected]>
     1945
     1946        Add MacroAssemblerX86Common::set8 to fix the build.
     1947
     1948        * assembler/MacroAssemblerX86Common.h:
     1949        (JSC::MacroAssemblerX86Common::set8):
     1950
     19512009-05-12  Geoffrey Garen  <[email protected]>
     1952
     1953        Reviewed by Sam Weinig.
     1954       
     1955        Added overflow recovery for pre_inc and pre_dec.
     1956       
     1957        Turned some short-circuit code into early returns, as is the WebKit style.
     1958
     1959        * jit/JITArithmetic.cpp:
     1960        (JSC::JIT::emit_op_post_inc):
     1961        (JSC::JIT::emitSlow_op_post_inc):
     1962        (JSC::JIT::emit_op_post_dec):
     1963        (JSC::JIT::emitSlow_op_post_dec):
     1964        (JSC::JIT::emitSlow_op_pre_inc):
     1965        (JSC::JIT::emitSlow_op_pre_dec):
     1966
     19672009-05-12  Sam Weinig  <[email protected]>
     1968
     1969        Reviewed by Geoffrey Garen.
     1970
     1971        Implement op_jmp, op_loop, op_eq and op_neq.
     1972
     1973        * jit/JITOpcodes.cpp:
     1974        (JSC::JIT::emit_op_jmp):
     1975        (JSC::JIT::emit_op_loop):
     1976        (JSC::JIT::emit_op_eq):
     1977        (JSC::JIT::emitSlow_op_eq):
     1978        (JSC::JIT::emit_op_neq):
     1979        (JSC::JIT::emitSlow_op_neq):
     1980        (JSC::JIT::emit_op_enter):
     1981        (JSC::JIT::emit_op_enter_with_activation):
     1982
     19832009-05-12  Sam Weinig  <[email protected]>
     1984
     1985        Reviewed by Geoffrey Garen.
     1986
     1987        Implement the slow cases for arithmetic opcodes.
     1988
     1989        * jit/JITArithmetic.cpp:
     1990        (JSC::JIT::emitSlow_op_lshift):
     1991        (JSC::JIT::emitSlow_op_rshift):
     1992        (JSC::JIT::emitSlow_op_bitand):
     1993        (JSC::JIT::emitSlow_op_bitor):
     1994        (JSC::JIT::emitSlow_op_bitxor):
     1995        (JSC::JIT::emitSlow_op_bitnot):
     1996        (JSC::JIT::emitSlow_op_sub):
     1997        (JSC::JIT::emitSlow_op_mul):
     1998        (JSC::JIT::emitSlow_op_mod):
     1999        (JSC::JIT::emit_op_mod):
     2000
     20012009-05-12  Sam Weinig  <[email protected]>
     2002
     2003        Reviewed by Geoffrey Garen.
     2004
     2005        Implement op_bitnot.
     2006
     2007        * assembler/MacroAssemblerX86Common.h:
     2008        (JSC::MacroAssemblerX86Common::not32):
     2009        * assembler/X86Assembler.h:
     2010        (JSC::X86Assembler::notl_m):
     2011        * jit/JITArithmetic.cpp:
     2012        (JSC::JIT::emit_op_bitnot):
     2013
     20142009-05-12  Sam Weinig  <[email protected]>
     2015
     2016        Reviewed by Geoffrey Garen.
     2017
     2018        Add arithmetic opcode implementations from the old nitro-extreme branch.
     2019
     2020        * jit/JIT.h:
     2021        * jit/JITArithmetic.cpp:
     2022        (JSC::JIT::emit_op_jnless):
     2023        (JSC::JIT::emitSlow_op_jnless):
     2024        (JSC::JIT::emit_op_jnlesseq):
     2025        (JSC::JIT::emitSlow_op_jnlesseq):
     2026        (JSC::JIT::emit_op_lshift):
     2027        (JSC::JIT::emitSlow_op_lshift):
     2028        (JSC::JIT::emit_op_rshift):
     2029        (JSC::JIT::emitSlow_op_rshift):
     2030        (JSC::JIT::emit_op_bitand):
     2031        (JSC::JIT::emitBitAnd32Constant):
     2032        (JSC::JIT::emitBitAnd32InPlace):
     2033        (JSC::JIT::emit_op_bitor):
     2034        (JSC::JIT::emitSlow_op_bitor):
     2035        (JSC::JIT::emitBitOr32Constant):
     2036        (JSC::JIT::emitBitOr32InPlace):
     2037        (JSC::JIT::emit_op_bitxor):
     2038        (JSC::JIT::emitSlow_op_bitxor):
     2039        (JSC::JIT::emitBitXor32Constant):
     2040        (JSC::JIT::emitBitXor32InPlace):
     2041        (JSC::JIT::emit_op_bitnot):
     2042        (JSC::JIT::emitSlow_op_bitnot):
     2043        (JSC::JIT::emit_op_post_inc):
     2044        (JSC::JIT::emitSlow_op_post_inc):
     2045        (JSC::JIT::emit_op_post_dec):
     2046        (JSC::JIT::emitSlow_op_post_dec):
     2047        (JSC::JIT::emit_op_pre_inc):
     2048        (JSC::JIT::emitSlow_op_pre_inc):
     2049        (JSC::JIT::emit_op_pre_dec):
     2050        (JSC::JIT::emitSlow_op_pre_dec):
     2051        (JSC::JIT::emit_op_add):
     2052        (JSC::JIT::emitAdd32Constant):
     2053        (JSC::JIT::emitAdd32InPlace):
     2054        (JSC::JIT::emitSlow_op_add):
     2055        (JSC::JIT::emit_op_sub):
     2056        (JSC::JIT::emitSlow_op_sub):
     2057        (JSC::JIT::emitSub32ConstantLeft):
     2058        (JSC::JIT::emitSub32ConstantRight):
     2059        (JSC::JIT::emitSub32InPlaceLeft):
     2060        (JSC::JIT::emitSub32InPlaceRight):
     2061        (JSC::JIT::emit_op_mul):
     2062        (JSC::JIT::emitSlow_op_mul):
     2063        (JSC::JIT::emitMul32Constant):
     2064        (JSC::JIT::emitMul32InPlace):
     2065        (JSC::JIT::emit_op_mod):
     2066        (JSC::JIT::emitSlow_op_mod):
     2067        * jit/JITOpcodes.cpp:
     2068
     20692009-05-12  Geoffrey Garen  <[email protected]>
     2070
     2071        Removed JIT_OPTIMIZE_ARITHMETIC setting, since it was all about 32bit
     2072        value representations.
     2073       
     2074        Added JSAPIValueWrapper to the repository.
     2075
     2076        * jit/JIT.h:
     2077        * jit/JITArithmetic.cpp:
     2078        * runtime/JSAPIValueWrapper.cpp: Added.
     2079        (JSC::JSAPIValueWrapper::toPrimitive):
     2080        (JSC::JSAPIValueWrapper::getPrimitiveNumber):
     2081        (JSC::JSAPIValueWrapper::toBoolean):
     2082        (JSC::JSAPIValueWrapper::toNumber):
     2083        (JSC::JSAPIValueWrapper::toString):
     2084        (JSC::JSAPIValueWrapper::toObject):
     2085        * runtime/JSAPIValueWrapper.h: Added.
     2086        (JSC::JSAPIValueWrapper::value):
     2087        (JSC::JSAPIValueWrapper::isAPIValueWrapper):
     2088        (JSC::JSAPIValueWrapper::JSAPIValueWrapper):
     2089        (JSC::jsAPIValueWrapper):
     2090        * wtf/Platform.h:
     2091
     20922009-05-12  Geoffrey Garen  <[email protected]>
     2093
     2094        Turned on the JIT and got it building and running the most trivial of
     2095        programs.
     2096       
     2097        All configurable optimizations are turned off, and a few opcodes are ad
     2098        hoc #if'd out.
     2099       
     2100        So far, I've only merged op_mov and op_end, but some stub-reliant
     2101        opcodes work as-is from TOT.
     2102       
     2103        * bytecode/CodeBlock.cpp:
     2104        (JSC::CodeBlock::~CodeBlock):
     2105        * bytecode/CodeBlock.h:
     2106        * jit/JIT.cpp:
     2107        (JSC::JIT::compileOpStrictEq):
     2108        * jit/JIT.h:
     2109        * jit/JITArithmetic.cpp:
     2110        (JSC::JIT::emit_op_lshift):
     2111        (JSC::JIT::emitSlow_op_lshift):
     2112        (JSC::JIT::emit_op_rshift):
     2113        (JSC::JIT::emitSlow_op_rshift):
     2114        (JSC::JIT::emit_op_jnless):
     2115        (JSC::JIT::emitSlow_op_jnless):
     2116        (JSC::JIT::emit_op_jnlesseq):
     2117        (JSC::JIT::emitSlow_op_jnlesseq):
     2118        (JSC::JIT::emit_op_bitand):
     2119        (JSC::JIT::emitSlow_op_bitand):
     2120        (JSC::JIT::emit_op_post_inc):
     2121        (JSC::JIT::emitSlow_op_post_inc):
     2122        (JSC::JIT::emit_op_post_dec):
     2123        (JSC::JIT::emitSlow_op_post_dec):
     2124        (JSC::JIT::emit_op_pre_inc):
     2125        (JSC::JIT::emitSlow_op_pre_inc):
     2126        (JSC::JIT::emit_op_pre_dec):
     2127        (JSC::JIT::emitSlow_op_pre_dec):
     2128        (JSC::JIT::emit_op_mod):
     2129        (JSC::JIT::emitSlow_op_mod):
     2130        (JSC::JIT::emit_op_add):
     2131        (JSC::JIT::emit_op_mul):
     2132        (JSC::JIT::emit_op_sub):
     2133        (JSC::JIT::compileBinaryArithOpSlowCase):
     2134        (JSC::JIT::emitSlow_op_add):
     2135        (JSC::JIT::emitSlow_op_mul):
     2136        * jit/JITCall.cpp:
     2137        (JSC::JIT::compileOpCallInitializeCallFrame):
     2138        (JSC::JIT::compileOpConstructSetupArgs):
     2139        (JSC::JIT::compileOpCallVarargs):
     2140        (JSC::JIT::compileOpCall):
     2141        (JSC::JIT::compileOpCallSlowCase):
     2142        * jit/JITInlineMethods.h:
     2143        (JSC::JIT::getConstantOperandImmediateInt):
     2144        (JSC::JIT::isOperandConstantImmediateInt):
     2145        (JSC::JIT::emitInitRegister):
     2146        (JSC::JIT::addSlowCase):
     2147        (JSC::JIT::addJump):
     2148        (JSC::JIT::emitJumpSlowToHot):
     2149        (JSC::JIT::tagFor):
     2150        (JSC::JIT::payloadFor):
     2151        (JSC::JIT::emitLoad):
     2152        (JSC::JIT::emitLoadReturnValue):
     2153        (JSC::JIT::emitStore):
     2154        (JSC::JIT::emitStoreReturnValue):
     2155        * jit/JITOpcodes.cpp:
     2156        (JSC::JIT::emit_op_mov):
     2157        (JSC::JIT::emit_op_end):
     2158        (JSC::JIT::emit_op_jmp):
     2159        (JSC::JIT::emit_op_loop):
     2160        (JSC::JIT::emit_op_loop_if_less):
     2161        (JSC::JIT::emit_op_loop_if_lesseq):
     2162        (JSC::JIT::emit_op_instanceof):
     2163        (JSC::JIT::emit_op_get_global_var):
     2164        (JSC::JIT::emit_op_put_global_var):
     2165        (JSC::JIT::emit_op_get_scoped_var):
     2166        (JSC::JIT::emit_op_put_scoped_var):
     2167        (JSC::JIT::emit_op_tear_off_activation):
     2168        (JSC::JIT::emit_op_ret):
     2169        (JSC::JIT::emit_op_construct_verify):
     2170        (JSC::JIT::emit_op_to_primitive):
     2171        (JSC::JIT::emit_op_loop_if_true):
     2172        (JSC::JIT::emit_op_resolve_global):
     2173        (JSC::JIT::emit_op_not):
     2174        (JSC::JIT::emit_op_jfalse):
     2175        (JSC::JIT::emit_op_jeq_null):
     2176        (JSC::JIT::emit_op_jneq_null):
     2177        (JSC::JIT::emit_op_jneq_ptr):
     2178        (JSC::JIT::emit_op_unexpected_load):
     2179        (JSC::JIT::emit_op_eq):
     2180        (JSC::JIT::emit_op_bitnot):
     2181        (JSC::JIT::emit_op_jtrue):
     2182        (JSC::JIT::emit_op_neq):
     2183        (JSC::JIT::emit_op_bitxor):
     2184        (JSC::JIT::emit_op_bitor):
     2185        (JSC::JIT::emit_op_throw):
     2186        (JSC::JIT::emit_op_next_pname):
     2187        (JSC::JIT::emit_op_push_scope):
     2188        (JSC::JIT::emit_op_to_jsnumber):
     2189        (JSC::JIT::emit_op_push_new_scope):
     2190        (JSC::JIT::emit_op_catch):
     2191        (JSC::JIT::emit_op_switch_imm):
     2192        (JSC::JIT::emit_op_switch_char):
     2193        (JSC::JIT::emit_op_switch_string):
     2194        (JSC::JIT::emit_op_new_error):
     2195        (JSC::JIT::emit_op_eq_null):
     2196        (JSC::JIT::emit_op_neq_null):
     2197        (JSC::JIT::emit_op_convert_this):
     2198        (JSC::JIT::emit_op_profile_will_call):
     2199        (JSC::JIT::emit_op_profile_did_call):
     2200        (JSC::JIT::emitSlow_op_construct_verify):
     2201        (JSC::JIT::emitSlow_op_get_by_val):
     2202        (JSC::JIT::emitSlow_op_loop_if_less):
     2203        (JSC::JIT::emitSlow_op_loop_if_lesseq):
     2204        (JSC::JIT::emitSlow_op_put_by_val):
     2205        (JSC::JIT::emitSlow_op_not):
     2206        (JSC::JIT::emitSlow_op_instanceof):
     2207        * jit/JITPropertyAccess.cpp:
     2208        (JSC::JIT::emit_op_get_by_val):
     2209        (JSC::JIT::emit_op_put_by_val):
     2210        (JSC::JIT::emit_op_put_by_index):
     2211        (JSC::JIT::emit_op_put_getter):
     2212        (JSC::JIT::emit_op_put_setter):
     2213        (JSC::JIT::emit_op_del_by_id):
     2214        (JSC::JIT::compileGetByIdHotPath):
     2215        (JSC::JIT::compilePutByIdHotPath):
     2216        * jit/JITStubCall.h:
     2217        (JSC::JITStubCall::JITStubCall):
     2218        (JSC::JITStubCall::addArgument):
     2219        (JSC::JITStubCall::call):
     2220        (JSC::JITStubCall::):
     2221        (JSC::CallEvalJITStub::CallEvalJITStub):
     2222        * jit/JITStubs.cpp:
     2223        (JSC::):
     2224        (JSC::JITStubs::cti_op_add):
     2225        (JSC::JITStubs::cti_op_pre_inc):
     2226        (JSC::JITStubs::cti_op_mul):
     2227        (JSC::JITStubs::cti_op_get_by_val):
     2228        (JSC::JITStubs::cti_op_get_by_val_string):
     2229        (JSC::JITStubs::cti_op_get_by_val_byte_array):
     2230        (JSC::JITStubs::cti_op_sub):
     2231        (JSC::JITStubs::cti_op_put_by_val):
     2232        (JSC::JITStubs::cti_op_put_by_val_array):
     2233        (JSC::JITStubs::cti_op_put_by_val_byte_array):
     2234        (JSC::JITStubs::cti_op_negate):
     2235        (JSC::JITStubs::cti_op_div):
     2236        (JSC::JITStubs::cti_op_pre_dec):
     2237        (JSC::JITStubs::cti_op_post_inc):
     2238        (JSC::JITStubs::cti_op_eq):
     2239        (JSC::JITStubs::cti_op_lshift):
     2240        (JSC::JITStubs::cti_op_bitand):
     2241        (JSC::JITStubs::cti_op_rshift):
     2242        (JSC::JITStubs::cti_op_bitnot):
     2243        (JSC::JITStubs::cti_op_mod):
     2244        (JSC::JITStubs::cti_op_neq):
     2245        (JSC::JITStubs::cti_op_post_dec):
     2246        (JSC::JITStubs::cti_op_urshift):
     2247        (JSC::JITStubs::cti_op_bitxor):
     2248        (JSC::JITStubs::cti_op_bitor):
     2249        (JSC::JITStubs::cti_op_switch_imm):
     2250        * jit/JITStubs.h:
     2251        * runtime/JSArray.cpp:
     2252        (JSC::JSArray::JSArray):
     2253        * runtime/JSFunction.cpp:
     2254        (JSC::JSFunction::~JSFunction):
     2255        * runtime/JSValue.h:
     2256        (JSC::JSValue::payload):
     2257        * wtf/Platform.h:
     2258
     22592009-05-07  Sam Weinig  <[email protected]>
     2260
     2261        Reviewed by Geoffrey Garen.
     2262
     2263        Add some new MacroAssembler and assembler functions that will be needed shortly.
     2264
     2265        * assembler/MacroAssemblerX86Common.h:
     2266        (JSC::MacroAssemblerX86Common::add32):
     2267        (JSC::MacroAssemblerX86Common::and32):
     2268        (JSC::MacroAssemblerX86Common::mul32):
     2269        (JSC::MacroAssemblerX86Common::neg32):
     2270        (JSC::MacroAssemblerX86Common::or32):
     2271        (JSC::MacroAssemblerX86Common::sub32):
     2272        (JSC::MacroAssemblerX86Common::xor32):
     2273        (JSC::MacroAssemblerX86Common::branchAdd32):
     2274        (JSC::MacroAssemblerX86Common::branchMul32):
     2275        (JSC::MacroAssemblerX86Common::branchSub32):
     2276        * assembler/X86Assembler.h:
     2277        (JSC::X86Assembler::):
     2278        (JSC::X86Assembler::addl_rm):
     2279        (JSC::X86Assembler::andl_mr):
     2280        (JSC::X86Assembler::andl_rm):
     2281        (JSC::X86Assembler::andl_im):
     2282        (JSC::X86Assembler::negl_r):
     2283        (JSC::X86Assembler::notl_r):
     2284        (JSC::X86Assembler::orl_rm):
     2285        (JSC::X86Assembler::orl_im):
     2286        (JSC::X86Assembler::subl_rm):
     2287        (JSC::X86Assembler::xorl_mr):
     2288        (JSC::X86Assembler::xorl_rm):
     2289        (JSC::X86Assembler::xorl_im):
     2290        (JSC::X86Assembler::imull_mr):
     2291
     22922009-05-11  Sam Weinig  <[email protected]>
     2293
     2294        Reviewed by Cameron Zwarich.
     2295
     2296        Remove the NumberHeap.
     2297
     2298        * JavaScriptCore.exp:
     2299        * runtime/Collector.cpp:
     2300        (JSC::Heap::Heap):
     2301        (JSC::Heap::destroy):
     2302        (JSC::Heap::recordExtraCost):
     2303        (JSC::Heap::heapAllocate):
     2304        (JSC::Heap::markConservatively):
     2305        (JSC::Heap::sweep):
     2306        (JSC::Heap::collect):
     2307        (JSC::Heap::objectCount):
     2308        (JSC::Heap::statistics):
     2309        (JSC::typeName):
     2310        (JSC::Heap::isBusy):
     2311        * runtime/Collector.h:
     2312        (JSC::Heap::globalData):
     2313        * runtime/JSCell.h:
     2314
     23152009-05-11  Geoffrey Garen  <[email protected]>
     2316
     2317        Reviewed by Sam Weinig.
     2318
     2319        Land initial commit of new number representation for 32 bit platforms,
     2320        with JIT disabled.
     2321
     2322        * API/APICast.h:
     2323        (toJS):
     2324        (toRef):
     2325        * API/JSCallbackObjectFunctions.h:
     2326        (JSC::::hasInstance):
     2327        (JSC::::toNumber):
     2328        (JSC::::toString):
     2329        * API/tests/testapi.c:
     2330        (EvilExceptionObject_convertToType):
     2331        * AllInOneFile.cpp:
     2332        * JavaScriptCore.exp:
     2333        * JavaScriptCore.xcodeproj/project.pbxproj:
     2334        * bytecode/CodeBlock.cpp:
     2335        (JSC::valueToSourceString):
     2336        * bytecompiler/BytecodeGenerator.cpp:
     2337        (JSC::BytecodeGenerator::emitLoad):
     2338        (JSC::BytecodeGenerator::emitUnexpectedLoad):
     2339        (JSC::keyForImmediateSwitch):
     2340        * bytecompiler/BytecodeGenerator.h:
     2341        * interpreter/Interpreter.cpp:
     2342        (JSC::Interpreter::dumpRegisters):
     2343        (JSC::Interpreter::privateExecute):
     2344        * parser/Nodes.cpp:
     2345        (JSC::ArrayNode::emitBytecode):
     2346        (JSC::processClauseList):
     2347        * runtime/ArgList.h:
     2348        * runtime/Collector.h:
     2349        (JSC::sizeof):
     2350        * runtime/DateMath.cpp:
     2351        * runtime/ExceptionHelpers.h:
     2352        * runtime/InitializeThreading.cpp:
     2353        * runtime/JSArray.cpp:
     2354        (JSC::JSArray::JSArray):
     2355        * runtime/JSCell.cpp:
     2356        * runtime/JSCell.h:
     2357        (JSC::JSCell::isAPIValueWrapper):
     2358        (JSC::JSValue::isString):
     2359        (JSC::JSValue::isGetterSetter):
     2360        (JSC::JSValue::isObject):
     2361        (JSC::JSValue::getString):
     2362        (JSC::JSValue::getObject):
     2363        (JSC::JSValue::getCallData):
     2364        (JSC::JSValue::getConstructData):
     2365        (JSC::JSValue::getUInt32):
     2366        (JSC::JSValue::marked):
     2367        (JSC::JSValue::toPrimitive):
     2368        (JSC::JSValue::getPrimitiveNumber):
     2369        (JSC::JSValue::toBoolean):
     2370        (JSC::JSValue::toNumber):
     2371        (JSC::JSValue::toString):
     2372        (JSC::JSValue::needsThisConversion):
     2373        (JSC::JSValue::toThisString):
     2374        (JSC::JSValue::getJSNumber):
     2375        (JSC::JSValue::toObject):
     2376        (JSC::JSValue::toThisObject):
     2377        * runtime/JSGlobalData.cpp:
     2378        (JSC::JSGlobalData::JSGlobalData):
     2379        * runtime/JSGlobalData.h:
     2380        * runtime/JSGlobalObject.h:
     2381        (JSC::Structure::prototypeForLookup):
     2382        * runtime/JSGlobalObjectFunctions.cpp:
     2383        (JSC::globalFuncParseInt):
     2384        * runtime/JSImmediate.h:
     2385        * runtime/JSNumberCell.cpp: Removed.
     2386        * runtime/JSNumberCell.h: Removed.
     2387        * runtime/JSObject.h:
     2388        (JSC::JSValue::get):
     2389        (JSC::JSValue::put):
     2390        * runtime/JSString.h:
     2391        (JSC::JSValue::toThisJSString):
     2392        * runtime/JSValue.cpp:
     2393        (JSC::JSValue::toInteger):
     2394        (JSC::JSValue::toIntegerPreserveNaN):
     2395        (JSC::JSValue::toObjectSlowCase):
     2396        (JSC::JSValue::toThisObjectSlowCase):
     2397        (JSC::JSValue::synthesizeObject):
     2398        (JSC::JSValue::synthesizePrototype):
     2399        (JSC::JSValue::description):
     2400        (JSC::nonInlineNaN):
     2401        * runtime/JSValue.h:
     2402        (JSC::JSValue::):
     2403        (JSC::EncodedJSValueHashTraits::emptyValue):
     2404        (JSC::jsNaN):
     2405        (JSC::operator==):
     2406        (JSC::operator!=):
     2407        (JSC::toInt32):
     2408        (JSC::toUInt32):
     2409        (JSC::JSValue::encode):
     2410        (JSC::JSValue::decode):
     2411        (JSC::JSValue::JSValue):
     2412        (JSC::JSValue::operator bool):
     2413        (JSC::JSValue::operator==):
     2414        (JSC::JSValue::operator!=):
     2415        (JSC::JSValue::isUndefined):
     2416        (JSC::JSValue::isNull):
     2417        (JSC::JSValue::isUndefinedOrNull):
     2418        (JSC::JSValue::isCell):
     2419        (JSC::JSValue::isInt32):
     2420        (JSC::JSValue::isUInt32):
     2421        (JSC::JSValue::isDouble):
     2422        (JSC::JSValue::isTrue):
     2423        (JSC::JSValue::isFalse):
     2424        (JSC::JSValue::tag):
     2425        (JSC::JSValue::asInt32):
     2426        (JSC::JSValue::asUInt32):
     2427        (JSC::JSValue::asDouble):
     2428        (JSC::JSValue::asCell):
     2429        (JSC::JSValue::isNumber):
     2430        (JSC::JSValue::isBoolean):
     2431        (JSC::JSValue::getBoolean):
     2432        (JSC::JSValue::uncheckedGetNumber):
     2433        (JSC::JSValue::toJSNumber):
     2434        (JSC::JSValue::getNumber):
     2435        (JSC::JSValue::toInt32):
     2436        (JSC::JSValue::toUInt32):
     2437        * runtime/Operations.h:
     2438        (JSC::JSValue::equal):
     2439        (JSC::JSValue::equalSlowCaseInline):
     2440        (JSC::JSValue::strictEqual):
     2441        (JSC::JSValue::strictEqualSlowCaseInline):
     2442        (JSC::jsLess):
     2443        (JSC::jsLessEq):
     2444        (JSC::jsAdd):
     2445        * runtime/PropertySlot.h:
     2446        * runtime/StringPrototype.cpp:
     2447        (JSC::stringProtoFuncCharAt):
     2448        (JSC::stringProtoFuncCharCodeAt):
     2449        (JSC::stringProtoFuncIndexOf):
     2450        * wtf/Platform.h:
     2451
     2452=== Start merge of nitro-extreme branch 2009-07-30 ===
     2453
    124542009-07-29  Laszlo Gombos  <[email protected]>
    22455
  • trunk/JavaScriptCore/JavaScriptCore.exp

    r46431 r46598  
    103103__ZN3JSC11JSByteArray15createStructureENS_7JSValueE
    104104__ZN3JSC11JSByteArrayC1EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS3_9ByteArrayEPKNS_9ClassInfoE
    105 __ZN3JSC11JSImmediate12nonInlineNaNEv
    106 __ZN3JSC11JSImmediate12toThisObjectENS_7JSValueEPNS_9ExecStateE
    107 __ZN3JSC11JSImmediate8toObjectENS_7JSValueEPNS_9ExecStateE
    108 __ZN3JSC11JSImmediate8toStringENS_7JSValueE
    109 __ZN3JSC11JSImmediate9prototypeENS_7JSValueEPNS_9ExecStateE
    110105__ZN3JSC11ParserArena5resetEv
    111106__ZN3JSC11checkSyntaxEPNS_9ExecStateERKNS_10SourceCodeE
     
    128123__ZN3JSC12StringObjectC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEERKNS_7UStringE
    129124__ZN3JSC12jsNumberCellEPNS_9ExecStateEd
     125__ZN3JSC12nonInlineNaNEv
    130126__ZN3JSC13SamplingFlags4stopEv
    131127__ZN3JSC13SamplingFlags5startEv
     
    163159__ZN3JSC17constructFunctionEPNS_9ExecStateERKNS_7ArgListERKNS_10IdentifierERKNS_7UStringEi
    164160__ZN3JSC18DebuggerActivationC1EPNS_8JSObjectE
    165 __ZN3JSC18jsAPIMangledNumberEPNS_9ExecStateEd
    166161__ZN3JSC19constructEmptyArrayEPNS_9ExecStateE
    167162__ZN3JSC19initializeThreadingEv
     
    173168__ZN3JSC25evaluateInGlobalCallFrameERKNS_7UStringERNS_7JSValueEPNS_14JSGlobalObjectE
    174169__ZN3JSC4Heap11objectCountEv
    175 __ZN3JSC4Heap14allocateNumberEm
    176170__ZN3JSC4Heap14primaryHeapEndEv
    177171__ZN3JSC4Heap15recordExtraCostEm
     
    218212__ZN3JSC7Profile7excludeEPKNS_11ProfileNodeE
    219213__ZN3JSC7Profile7forEachEMNS_11ProfileNodeEFvvE
    220 __ZN3JSC7UString3Rep12sharedBufferEv
    221214__ZN3JSC7UString3Rep11computeHashEPKci
    222215__ZN3JSC7UString3Rep11computeHashEPKti
     216__ZN3JSC7UString3Rep12sharedBufferEv
     217__ZN3JSC7UString3Rep14createFromUTF8EPKc
    223218__ZN3JSC7UString3Rep14nullBaseStringE
    224219__ZN3JSC7UString3Rep6createEPtiN3WTF10PassRefPtrINS3_21CrossThreadRefCountedINS3_16OwnFastMallocPtrItEEEEEE
    225220__ZN3JSC7UString3Rep7destroyEv
     221__ZN3JSC7UString4fromEd
    226222__ZN3JSC7UString4fromEi
    227223__ZN3JSC7UString4fromEj
     
    280276__ZN3JSCgtERKNS_7UStringES2_
    281277__ZN3JSCltERKNS_7UStringES2_
    282 __ZN3JSC7UString3Rep14createFromUTF8EPKc
    283278__ZN3WTF10fastCallocEmm
    284279__ZN3WTF10fastMallocEm
     
    291286__ZN3WTF12randomNumberEv
    292287__ZN3WTF13currentThreadEv
    293 __ZN3WTF37parseDateFromNullTerminatedCharactersEPKc
    294288__ZN3WTF13tryFastCallocEmm
    295289__ZN3WTF15ThreadCondition4waitERNS_5MutexE
     
    313307__ZN3WTF28setMainThreadCallbacksPausedEb
    314308__ZN3WTF36lockAtomicallyInitializedStaticMutexEv
     309__ZN3WTF37parseDateFromNullTerminatedCharactersEPKc
    315310__ZN3WTF38unlockAtomicallyInitializedStaticMutexEv
    316311__ZN3WTF5Mutex4lockEv
     
    345340__ZNK3JSC6JSCell12toThisStringEPNS_9ExecStateE
    346341__ZNK3JSC6JSCell14isGetterSetterEv
    347 __ZNK3JSC6JSCell17getTruncatedInt32ERi
    348 __ZNK3JSC6JSCell18getTruncatedUInt32ERj
    349342__ZNK3JSC6JSCell9classInfoEv
    350343__ZNK3JSC6JSCell9getStringERNS_7UStringE
     
    352345__ZNK3JSC6JSCell9getUInt32ERj
    353346__ZNK3JSC7ArgList8getSliceEiRS0_
     347__ZNK3JSC7JSValue16toObjectSlowCaseEPNS_9ExecStateE
     348__ZNK3JSC7JSValue19synthesizePrototypeEPNS_9ExecStateE
     349__ZNK3JSC7JSValue20toThisObjectSlowCaseEPNS_9ExecStateE
    354350__ZNK3JSC7JSValue9toIntegerEPNS_9ExecStateE
    355351__ZNK3JSC7UString10UTF8StringEb
     
    378374__ZTVN3JSC16InternalFunctionE
    379375__ZTVN3JSC16JSVariableObjectE
     376__ZTVN3JSC17JSAPIValueWrapperE
    380377__ZTVN3JSC8JSObjectE
    381378__ZTVN3JSC8JSStringE
  • trunk/JavaScriptCore/JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.vcproj

    r46202 r46598  
    706706                        <File
    707707                                RelativePath="..\..\runtime\JSActivation.h"
     708                                >
     709                        </File>
     710                        <File
     711                                RelativePath="..\..\runtime\JSAPIValueWrapper.cpp"
     712                                >
     713                        </File>
     714                        <File
     715                                RelativePath="..\..\runtime\JSAPIValueWrapper.h"
    708716                                >
    709717                        </File>
  • trunk/JavaScriptCore/JavaScriptCore.xcodeproj/project.pbxproj

    r46202 r46598  
    120120                7E4EE70F0EBB7A5B005934AA /* StructureChain.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 7E4EE70E0EBB7A5B005934AA /* StructureChain.cpp */; };
    121121                7EFF00640EC05A9A00AA7C93 /* NodeInfo.h in Headers */ = {isa = PBXBuildFile; fileRef = 7EFF00630EC05A9A00AA7C93 /* NodeInfo.h */; };
     122                840480131021A1D9008E7F01 /* JSAPIValueWrapper.h in Headers */ = {isa = PBXBuildFile; fileRef = BC0894D60FAFBA2D00001865 /* JSAPIValueWrapper.h */; settings = {ATTRIBUTES = (Private, ); }; };
    122123                860161E30F3A83C100F84710 /* AbstractMacroAssembler.h in Headers */ = {isa = PBXBuildFile; fileRef = 860161DF0F3A83C100F84710 /* AbstractMacroAssembler.h */; };
    123124                860161E40F3A83C100F84710 /* MacroAssemblerX86.h in Headers */ = {isa = PBXBuildFile; fileRef = 860161E00F3A83C100F84710 /* MacroAssemblerX86.h */; };
     
    770771                BC02E9B80E184545000F9297 /* GetterSetter.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = GetterSetter.cpp; sourceTree = "<group>"; };
    771772                BC02E9B90E184580000F9297 /* JSNumberCell.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = JSNumberCell.cpp; sourceTree = "<group>"; };
     773                BC0894D50FAFBA2D00001865 /* JSAPIValueWrapper.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = JSAPIValueWrapper.cpp; path = ../runtime/JSAPIValueWrapper.cpp; sourceTree = "<group>"; };
     774                BC0894D60FAFBA2D00001865 /* JSAPIValueWrapper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = JSAPIValueWrapper.h; path = ../runtime/JSAPIValueWrapper.h; sourceTree = "<group>"; };
    772775                BC1166000E1997B1008066DD /* DateInstance.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = DateInstance.cpp; sourceTree = "<group>"; };
    773776                BC1166010E1997B1008066DD /* DateInstance.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DateInstance.h; sourceTree = "<group>"; };
     
    10791082                                1CAA8B4A0D32C39A0041BCFF /* JavaScript.h */,
    10801083                                1CAA8B4B0D32C39A0041BCFF /* JavaScriptCore.h */,
     1084                                BC0894D50FAFBA2D00001865 /* JSAPIValueWrapper.cpp */,
     1085                                BC0894D60FAFBA2D00001865 /* JSAPIValueWrapper.h */,
    10811086                                1421359A0A677F4F00A8195E /* JSBase.cpp */,
    10821087                                142711380A460BBB0080EEEA /* JSBase.h */,
     
    17231728                                14C5242B0F5355E900BA3D04 /* JITStubs.h in Headers */,
    17241729                                BC18C4160E16F5CD00B34460 /* JSActivation.h in Headers */,
     1730                                840480131021A1D9008E7F01 /* JSAPIValueWrapper.h in Headers */,
    17251731                                BC18C4170E16F5CD00B34460 /* JSArray.h in Headers */,
    17261732                                BC18C4180E16F5CD00B34460 /* JSBase.h in Headers */,
  • trunk/JavaScriptCore/assembler/AbstractMacroAssembler.h

    r46209 r46598  
    379379
    380380    public:
     381        typedef Vector<Jump, 16> JumpVector;
     382
    381383        void link(AbstractMacroAssembler<AssemblerType>* masm)
    382384        {
     
    409411            return !m_jumps.size();
    410412        }
     413       
     414        const JumpVector& jumps() { return m_jumps; }
    411415
    412416    private:
    413         Vector<Jump, 16> m_jumps;
     417        JumpVector m_jumps;
    414418    };
    415419
  • trunk/JavaScriptCore/assembler/MacroAssemblerX86.h

    r46209 r46598  
    5252    using MacroAssemblerX86Common::branch32;
    5353    using MacroAssemblerX86Common::call;
     54    using MacroAssemblerX86Common::loadDouble;
     55    using MacroAssemblerX86Common::convertInt32ToDouble;
    5456
    5557    void add32(Imm32 imm, RegisterID src, RegisterID dest)
     
    8688    {
    8789        m_assembler.movl_mr(address, dest);
     90    }
     91
     92    void loadDouble(void* address, FPRegisterID dest)
     93    {
     94        ASSERT(isSSE2Present());
     95        m_assembler.movsd_mr(address, dest);
     96    }
     97
     98    void convertInt32ToDouble(AbsoluteAddress src, FPRegisterID dest)
     99    {
     100        m_assembler.cvtsi2sd_mr(src.m_ptr, dest);
    88101    }
    89102
  • trunk/JavaScriptCore/assembler/MacroAssemblerX86Common.h

    r44461 r46598  
    5858    enum DoubleCondition {
    5959        DoubleEqual = X86Assembler::ConditionE,
     60        DoubleNotEqual = X86Assembler::ConditionNE,
    6061        DoubleGreaterThan = X86Assembler::ConditionA,
    6162        DoubleGreaterThanOrEqual = X86Assembler::ConditionAE,
     
    9293        m_assembler.addl_mr(src.offset, src.base, dest);
    9394    }
     95
     96    void add32(RegisterID src, Address dest)
     97    {
     98        m_assembler.addl_rm(src, dest.offset, dest.base);
     99    }
    94100   
    95101    void and32(RegisterID src, RegisterID dest)
     
    101107    {
    102108        m_assembler.andl_ir(imm.m_value, dest);
     109    }
     110
     111    void and32(RegisterID src, Address dest)
     112    {
     113        m_assembler.andl_rm(src, dest.offset, dest.base);
     114    }
     115
     116    void and32(Address src, RegisterID dest)
     117    {
     118        m_assembler.andl_mr(src.offset, src.base, dest);
    103119    }
    104120
     
    139155        m_assembler.imull_rr(src, dest);
    140156    }
     157
     158    void mul32(Address src, RegisterID dest)
     159    {
     160        m_assembler.imull_mr(src.offset, src.base, dest);
     161    }
    141162   
    142163    void mul32(Imm32 imm, RegisterID src, RegisterID dest)
     
    144165        m_assembler.imull_i32r(src, imm.m_value, dest);
    145166    }
    146    
     167
     168    void neg32(RegisterID srcDest)
     169    {
     170        m_assembler.negl_r(srcDest);
     171    }
     172
     173    void neg32(Address srcDest)
     174    {
     175        m_assembler.negl_m(srcDest.offset, srcDest.base);
     176    }
     177
    147178    void not32(RegisterID srcDest)
    148179    {
    149180        m_assembler.notl_r(srcDest);
    150181    }
     182
     183    void not32(Address srcDest)
     184    {
     185        m_assembler.notl_m(srcDest.offset, srcDest.base);
     186    }
    151187   
    152188    void or32(RegisterID src, RegisterID dest)
     
    158194    {
    159195        m_assembler.orl_ir(imm.m_value, dest);
     196    }
     197
     198    void or32(RegisterID src, Address dest)
     199    {
     200        m_assembler.orl_rm(src, dest.offset, dest.base);
     201    }
     202
     203    void or32(Address src, RegisterID dest)
     204    {
     205        m_assembler.orl_mr(src.offset, src.base, dest);
    160206    }
    161207
     
    212258    }
    213259
     260    void sub32(RegisterID src, Address dest)
     261    {
     262        m_assembler.subl_rm(src, dest.offset, dest.base);
     263    }
     264
     265
    214266    void xor32(RegisterID src, RegisterID dest)
    215267    {
     
    217269    }
    218270
    219     void xor32(Imm32 imm, RegisterID srcDest)
    220     {
    221         m_assembler.xorl_ir(imm.m_value, srcDest);
     271    void xor32(Imm32 imm, Address dest)
     272    {
     273        m_assembler.xorl_im(imm.m_value, dest.offset, dest.base);
     274    }
     275
     276    void xor32(Imm32 imm, RegisterID dest)
     277    {
     278        m_assembler.xorl_ir(imm.m_value, dest);
     279    }
     280
     281    void xor32(RegisterID src, Address dest)
     282    {
     283        m_assembler.xorl_rm(src, dest.offset, dest.base);
     284    }
     285
     286    void xor32(Address src, RegisterID dest)
     287    {
     288        m_assembler.xorl_mr(src.offset, src.base, dest);
    222289    }
    223290   
     
    301368    }
    302369
     370    void divDouble(FPRegisterID src, FPRegisterID dest)
     371    {
     372        ASSERT(isSSE2Present());
     373        m_assembler.divsd_rr(src, dest);
     374    }
     375
     376    void divDouble(Address src, FPRegisterID dest)
     377    {
     378        ASSERT(isSSE2Present());
     379        m_assembler.divsd_mr(src.offset, src.base, dest);
     380    }
     381
    303382    void subDouble(FPRegisterID src, FPRegisterID dest)
    304383    {
     
    331410    }
    332411
     412    void convertInt32ToDouble(Address src, FPRegisterID dest)
     413    {
     414        m_assembler.cvtsi2sd_mr(src.offset, src.base, dest);
     415    }
     416
    333417    Jump branchDouble(DoubleCondition cond, FPRegisterID left, FPRegisterID right)
    334418    {
    335419        ASSERT(isSSE2Present());
    336420        m_assembler.ucomisd_rr(right, left);
     421        return Jump(m_assembler.jCC(x86Condition(cond)));
     422    }
     423
     424    Jump branchDouble(DoubleCondition cond, FPRegisterID left, Address right)
     425    {
     426        m_assembler.ucomisd_mr(right.offset, right.base, left);
    337427        return Jump(m_assembler.jCC(x86Condition(cond)));
    338428    }
     
    347437        m_assembler.cvttsd2si_rr(src, dest);
    348438        return branch32(Equal, dest, Imm32(0x80000000));
     439    }
     440
     441    void zeroDouble(FPRegisterID srcDest)
     442    {
     443        ASSERT(isSSE2Present());
     444        m_assembler.xorpd_rr(srcDest, srcDest);
    349445    }
    350446
     
    398494        // Note: on 64-bit this is a full register move; perhaps it would be
    399495        // useful to have separate move32 & movePtr, with move32 zero extending?
    400         m_assembler.movq_rr(src, dest);
     496        if (src != dest)
     497            m_assembler.movq_rr(src, dest);
    401498    }
    402499
     
    606703    }
    607704   
     705    Jump branchAdd32(Condition cond, Imm32 src, Address dest)
     706    {
     707        ASSERT((cond == Overflow) || (cond == Zero) || (cond == NonZero));
     708        add32(src, dest);
     709        return Jump(m_assembler.jCC(x86Condition(cond)));
     710    }
     711
     712    Jump branchAdd32(Condition cond, RegisterID src, Address dest)
     713    {
     714        ASSERT((cond == Overflow) || (cond == Zero) || (cond == NonZero));
     715        add32(src, dest);
     716        return Jump(m_assembler.jCC(x86Condition(cond)));
     717    }
     718
     719    Jump branchAdd32(Condition cond, Address src, RegisterID dest)
     720    {
     721        ASSERT((cond == Overflow) || (cond == Zero) || (cond == NonZero));
     722        add32(src, dest);
     723        return Jump(m_assembler.jCC(x86Condition(cond)));
     724    }
     725
    608726    Jump branchMul32(Condition cond, RegisterID src, RegisterID dest)
    609727    {
     
    612730        return Jump(m_assembler.jCC(x86Condition(cond)));
    613731    }
     732
     733    Jump branchMul32(Condition cond, Address src, RegisterID dest)
     734    {
     735        ASSERT((cond == Overflow) || (cond == Zero) || (cond == NonZero));
     736        mul32(src, dest);
     737        return Jump(m_assembler.jCC(x86Condition(cond)));
     738    }
    614739   
    615740    Jump branchMul32(Condition cond, Imm32 imm, RegisterID src, RegisterID dest)
     
    633758        return Jump(m_assembler.jCC(x86Condition(cond)));
    634759    }
    635    
     760
     761    Jump branchSub32(Condition cond, Imm32 imm, Address dest)
     762    {
     763        ASSERT((cond == Overflow) || (cond == Zero) || (cond == NonZero));
     764        sub32(imm, dest);
     765        return Jump(m_assembler.jCC(x86Condition(cond)));
     766    }
     767
     768    Jump branchSub32(Condition cond, RegisterID src, Address dest)
     769    {
     770        ASSERT((cond == Overflow) || (cond == Zero) || (cond == NonZero));
     771        sub32(src, dest);
     772        return Jump(m_assembler.jCC(x86Condition(cond)));
     773    }
     774
     775    Jump branchSub32(Condition cond, Address src, RegisterID dest)
     776    {
     777        ASSERT((cond == Overflow) || (cond == Zero) || (cond == NonZero));
     778        sub32(src, dest);
     779        return Jump(m_assembler.jCC(x86Condition(cond)));
     780    }
     781
     782    Jump branchOr32(Condition cond, RegisterID src, RegisterID dest)
     783    {
     784        ASSERT((cond == Signed) || (cond == Zero) || (cond == NonZero));
     785        or32(src, dest);
     786        return Jump(m_assembler.jCC(x86Condition(cond)));
     787    }
     788
    636789
    637790    // Miscellaneous operations:
     
    660813    {
    661814        m_assembler.ret();
     815    }
     816
     817    void set8(Condition cond, RegisterID left, RegisterID right, RegisterID dest)
     818    {
     819        m_assembler.cmpl_rr(right, left);
     820        m_assembler.setCC_r(x86Condition(cond), dest);
     821    }
     822
     823    void set8(Condition cond, Address left, RegisterID right, RegisterID dest)
     824    {
     825        m_assembler.cmpl_mr(left.offset, left.base, right);
     826        m_assembler.setCC_r(x86Condition(cond), dest);
     827    }
     828
     829    void set8(Condition cond, RegisterID left, Imm32 right, RegisterID dest)
     830    {
     831        if (((cond == Equal) || (cond == NotEqual)) && !right.m_value)
     832            m_assembler.testl_rr(left, left);
     833        else
     834            m_assembler.cmpl_ir(right.m_value, left);
     835        m_assembler.setCC_r(x86Condition(cond), dest);
    662836    }
    663837
     
    683857    // dest-src, operations always have a dest? ... possibly not true, considering
    684858    // asm ops like test, or pseudo ops like pop().
     859
     860    void setTest8(Condition cond, Address address, Imm32 mask, RegisterID dest)
     861    {
     862        if (mask.m_value == -1)
     863            m_assembler.cmpl_im(0, address.offset, address.base);
     864        else
     865            m_assembler.testl_i32m(mask.m_value, address.offset, address.base);
     866        m_assembler.setCC_r(x86Condition(cond), dest);
     867    }
     868
    685869    void setTest32(Condition cond, Address address, Imm32 mask, RegisterID dest)
    686870    {
  • trunk/JavaScriptCore/assembler/MacroAssemblerX86_64.h

    r46209 r46598  
    5151    using MacroAssemblerX86Common::store32;
    5252    using MacroAssemblerX86Common::call;
     53    using MacroAssemblerX86Common::loadDouble;
     54    using MacroAssemblerX86Common::convertInt32ToDouble;
    5355
    5456    void add32(Imm32 imm, AbsoluteAddress address)
     
    8587            swap(X86::eax, dest);
    8688        }
     89    }
     90
     91    void loadDouble(void* address, FPRegisterID dest)
     92    {
     93        move(ImmPtr(address), scratchRegister);
     94        loadDouble(scratchRegister, dest);
     95    }
     96
     97    void convertInt32ToDouble(AbsoluteAddress src, FPRegisterID dest)
     98    {
     99        move(Imm32(*static_cast<int32_t*>(src.m_ptr)), scratchRegister);
     100        m_assembler.cvtsi2sd_rr(scratchRegister, dest);
    87101    }
    88102
  • trunk/JavaScriptCore/assembler/X86Assembler.h

    r46247 r46598  
    115115        OP_2BYTE_ESCAPE                 = 0x0F,
    116116        OP_AND_EvGv                     = 0x21,
     117        OP_AND_GvEv                     = 0x23,
    117118        OP_SUB_EvGv                     = 0x29,
    118119        OP_SUB_GvEv                     = 0x2B,
    119120        PRE_PREDICT_BRANCH_NOT_TAKEN    = 0x2E,
    120121        OP_XOR_EvGv                     = 0x31,
     122        OP_XOR_GvEv                     = 0x33,
    121123        OP_CMP_EvGv                     = 0x39,
    122124        OP_CMP_GvEv                     = 0x3B,
     
    170172        OP2_MULSD_VsdWsd    = 0x59,
    171173        OP2_SUBSD_VsdWsd    = 0x5C,
     174        OP2_DIVSD_VsdWsd    = 0x5E,
     175        OP2_XORPD_VpdWpd    = 0x57,
    172176        OP2_MOVD_VdEd       = 0x6E,
    173177        OP2_MOVD_EdVd       = 0x7E,
     
    206210        GROUP3_OP_TEST = 0,
    207211        GROUP3_OP_NOT  = 2,
     212        GROUP3_OP_NEG  = 3,
    208213        GROUP3_OP_IDIV = 7,
    209214
     
    320325    }
    321326
     327    void addl_rm(RegisterID src, int offset, RegisterID base)
     328    {
     329        m_formatter.oneByteOp(OP_ADD_EvGv, src, base, offset);
     330    }
     331
    322332    void addl_ir(int imm, RegisterID dst)
    323333    {
     
    387397    }
    388398
     399    void andl_mr(int offset, RegisterID base, RegisterID dst)
     400    {
     401        m_formatter.oneByteOp(OP_AND_GvEv, dst, base, offset);
     402    }
     403
     404    void andl_rm(RegisterID src, int offset, RegisterID base)
     405    {
     406        m_formatter.oneByteOp(OP_AND_EvGv, src, base, offset);
     407    }
     408
    389409    void andl_ir(int imm, RegisterID dst)
    390410    {
     
    438458#endif
    439459
     460    void negl_r(RegisterID dst)
     461    {
     462        m_formatter.oneByteOp(OP_GROUP3_Ev, GROUP3_OP_NEG, dst);
     463    }
     464
     465    void negl_m(int offset, RegisterID base)
     466    {
     467        m_formatter.oneByteOp(OP_GROUP3_Ev, GROUP3_OP_NEG, base, offset);
     468    }
     469
    440470    void notl_r(RegisterID dst)
    441471    {
     
    443473    }
    444474
     475    void notl_m(int offset, RegisterID base)
     476    {
     477        m_formatter.oneByteOp(OP_GROUP3_Ev, GROUP3_OP_NOT, base, offset);
     478    }
     479
    445480    void orl_rr(RegisterID src, RegisterID dst)
    446481    {
     
    451486    {
    452487        m_formatter.oneByteOp(OP_OR_GvEv, dst, base, offset);
     488    }
     489
     490    void orl_rm(RegisterID src, int offset, RegisterID base)
     491    {
     492        m_formatter.oneByteOp(OP_OR_EvGv, src, base, offset);
    453493    }
    454494
     
    514554    }
    515555
     556    void subl_rm(RegisterID src, int offset, RegisterID base)
     557    {
     558        m_formatter.oneByteOp(OP_SUB_EvGv, src, base, offset);
     559    }
     560
    516561    void subl_ir(int imm, RegisterID dst)
    517562    {
     
    568613    {
    569614        m_formatter.oneByteOp(OP_XOR_EvGv, src, dst);
     615    }
     616
     617    void xorl_mr(int offset, RegisterID base, RegisterID dst)
     618    {
     619        m_formatter.oneByteOp(OP_XOR_GvEv, dst, base, offset);
     620    }
     621
     622    void xorl_rm(RegisterID src, int offset, RegisterID base)
     623    {
     624        m_formatter.oneByteOp(OP_XOR_EvGv, src, base, offset);
     625    }
     626
     627    void xorl_im(int imm, int offset, RegisterID base)
     628    {
     629        if (CAN_SIGN_EXTEND_8_32(imm)) {
     630            m_formatter.oneByteOp(OP_GROUP1_EvIb, GROUP1_OP_XOR, base, offset);
     631            m_formatter.immediate8(imm);
     632        } else {
     633            m_formatter.oneByteOp(OP_GROUP1_EvIz, GROUP1_OP_XOR, base, offset);
     634            m_formatter.immediate32(imm);
     635        }
    570636    }
    571637
     
    650716        m_formatter.twoByteOp(OP2_IMUL_GvEv, dst, src);
    651717    }
    652    
     718
     719    void imull_mr(int offset, RegisterID base, RegisterID dst)
     720    {
     721        m_formatter.twoByteOp(OP2_IMUL_GvEv, dst, base, offset);
     722    }
     723
    653724    void imull_i32r(RegisterID src, int32_t value, RegisterID dst)
    654725    {
     
    11551226    }
    11561227   
     1228    JmpSrc jz()
     1229    {
     1230        return je();
     1231    }
     1232
    11571233    JmpSrc jl()
    11581234    {
     
    12471323    }
    12481324
     1325    void cvtsi2sd_mr(int offset, RegisterID base, XMMRegisterID dst)
     1326    {
     1327        m_formatter.prefix(PRE_SSE_F2);
     1328        m_formatter.twoByteOp(OP2_CVTSI2SD_VsdEd, (RegisterID)dst, base, offset);
     1329    }
     1330
     1331#if !PLATFORM(X86_64)
     1332    void cvtsi2sd_mr(void* address, XMMRegisterID dst)
     1333    {
     1334        m_formatter.prefix(PRE_SSE_F2);
     1335        m_formatter.twoByteOp(OP2_CVTSI2SD_VsdEd, (RegisterID)dst, address);
     1336    }
     1337#endif
     1338
    12491339    void cvttsd2si_rr(XMMRegisterID src, RegisterID dst)
    12501340    {
     
    12841374        m_formatter.twoByteOp(OP2_MOVSD_VsdWsd, (RegisterID)dst, base, offset);
    12851375    }
     1376
     1377#if !PLATFORM(X86_64)
     1378    void movsd_mr(void* address, XMMRegisterID dst)
     1379    {
     1380        m_formatter.prefix(PRE_SSE_F2);
     1381        m_formatter.twoByteOp(OP2_MOVSD_VsdWsd, (RegisterID)dst, address);
     1382    }
     1383#endif
    12861384
    12871385    void mulsd_rr(XMMRegisterID src, XMMRegisterID dst)
     
    13201418        m_formatter.prefix(PRE_SSE_66);
    13211419        m_formatter.twoByteOp(OP2_UCOMISD_VsdWsd, (RegisterID)dst, (RegisterID)src);
     1420    }
     1421
     1422    void ucomisd_mr(int offset, RegisterID base, XMMRegisterID dst)
     1423    {
     1424        m_formatter.prefix(PRE_SSE_66);
     1425        m_formatter.twoByteOp(OP2_UCOMISD_VsdWsd, (RegisterID)dst, base, offset);
     1426    }
     1427
     1428    void divsd_rr(XMMRegisterID src, XMMRegisterID dst)
     1429    {
     1430        m_formatter.prefix(PRE_SSE_F2);
     1431        m_formatter.twoByteOp(OP2_DIVSD_VsdWsd, (RegisterID)dst, (RegisterID)src);
     1432    }
     1433
     1434    void divsd_mr(int offset, RegisterID base, XMMRegisterID dst)
     1435    {
     1436        m_formatter.prefix(PRE_SSE_F2);
     1437        m_formatter.twoByteOp(OP2_DIVSD_VsdWsd, (RegisterID)dst, base, offset);
     1438    }
     1439
     1440    void xorpd_rr(XMMRegisterID src, XMMRegisterID dst)
     1441    {
     1442        m_formatter.prefix(PRE_SSE_66);
     1443        m_formatter.twoByteOp(OP2_XORPD_VpdWpd, (RegisterID)dst, (RegisterID)src);
    13221444    }
    13231445
     
    16051727            memoryModRM(reg, base, index, scale, offset);
    16061728        }
     1729
     1730#if !PLATFORM(X86_64)
     1731        void twoByteOp(TwoByteOpcodeID opcode, int reg, void* address)
     1732        {
     1733            m_buffer.ensureSpace(maxInstructionSize);
     1734            m_buffer.putByteUnchecked(OP_2BYTE_ESCAPE);
     1735            m_buffer.putByteUnchecked(opcode);
     1736            memoryModRM(reg, address);
     1737        }
     1738#endif
    16071739
    16081740#if PLATFORM(X86_64)
  • trunk/JavaScriptCore/bytecode/CodeBlock.cpp

    r46210 r46598  
    5858static UString valueToSourceString(ExecState* exec, JSValue val)
    5959{
     60    if (!val)
     61        return "0";
     62
    6063    if (val.isString()) {
    6164        UString result("\"");
     
    596599        case op_div: {
    597600            printBinaryOp(location, it, "div");
     601            ++it;
    598602            break;
    599603        }
     
    738742            int id0 = (++it)->u.operand;
    739743            printf("[%4d] resolve_with_base %s, %s, %s\n", location, registerName(r0).c_str(), registerName(r1).c_str(), idName(id0, m_identifiers[id0]).c_str());
    740             break;
    741         }
    742         case op_resolve_func: {
    743             int r0 = (++it)->u.operand;
    744             int r1 = (++it)->u.operand;
    745             int id0 = (++it)->u.operand;
    746             printf("[%4d] resolve_func\t %s, %s, %s\n", location, registerName(r0).c_str(), registerName(r1).c_str(), idName(id0, m_identifiers[id0]).c_str());
    747744            break;
    748745        }
     
    13281325    }
    13291326
     1327#if ENABLE(JIT_OPTIMIZE_CALL)
    13301328    unlinkCallers();
    13311329#endif
     1330
     1331#endif // !ENABLE(JIT)
    13321332
    13331333#if DUMP_CODE_BLOCK_STATISTICS
     
    13361336}
    13371337
    1338 #if ENABLE(JIT)
     1338#if ENABLE(JIT_OPTIMIZE_CALL)
    13391339void CodeBlock::unlinkCallers()
    13401340{
  • trunk/JavaScriptCore/bytecode/CodeBlock.h

    r46247 r46598  
    235235        void refStructures(Instruction* vPC) const;
    236236        void derefStructures(Instruction* vPC) const;
    237 #if ENABLE(JIT)
     237#if ENABLE(JIT_OPTIMIZE_CALL)
    238238        void unlinkCallers();
    239239#endif
  • trunk/JavaScriptCore/bytecode/Opcode.h

    r46187 r46598  
    6868        macro(op_add, 5) \
    6969        macro(op_mul, 5) \
    70         macro(op_div, 4) \
     70        macro(op_div, 5) \
    7171        macro(op_mod, 4) \
    7272        macro(op_sub, 5) \
     
    9999        macro(op_resolve_base, 3) \
    100100        macro(op_resolve_with_base, 4) \
    101         macro(op_resolve_func, 4) \
    102101        macro(op_get_by_id, 8) \
    103102        macro(op_get_by_id_self, 8) \
  • trunk/JavaScriptCore/bytecompiler/BytecodeGenerator.cpp

    r45791 r46598  
    328328        emitOpcode(op_enter);
    329329
    330      if (usesArguments) {
     330    if (usesArguments) {
    331331        emitOpcode(op_init_arguments);
    332332
     
    862862
    863863    if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
    864         opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub) {
     864        opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
    865865        instructions().append(types.toInt());
    866     }
    867866
    868867    return dst;
     
    11841183}
    11851184
    1186 RegisterID* BytecodeGenerator::emitResolveFunction(RegisterID* baseDst, RegisterID* funcDst, const Identifier& property)
    1187 {
    1188     emitOpcode(op_resolve_func);
    1189     instructions().append(baseDst->index());
    1190     instructions().append(funcDst->index());
    1191     instructions().append(addConstant(property));
    1192     return baseDst;
    1193 }
    1194 
    11951185void BytecodeGenerator::emitMethodCheck()
    11961186{
     
    18061796    instructions().append(retAddrDst->index());
    18071797    instructions().append(finally->offsetFrom(instructions().size()));
     1798    emitLabel(newLabel().get()); // Record the fact that the next instruction is implicitly labeled, because op_sret will return to it.
    18081799    return finally;
    18091800}
     
    18591850    double value = static_cast<NumberNode*>(node)->value();
    18601851    int32_t key = static_cast<int32_t>(value);
    1861     ASSERT(JSValue::makeInt32Fast(key) && (JSValue::makeInt32Fast(key).getInt32Fast() == value));
    18621852    ASSERT(key == value);
    18631853    ASSERT(key >= min);
  • trunk/JavaScriptCore/bytecompiler/BytecodeGenerator.h

    r45609 r46598  
    277277        RegisterID* emitResolveBase(RegisterID* dst, const Identifier& property);
    278278        RegisterID* emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property);
    279         RegisterID* emitResolveFunction(RegisterID* baseDst, RegisterID* funcDst, const Identifier& property);
    280279
    281280        void emitMethodCheck();
     
    356355        PassRefPtr<Label> emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope);
    357356
    358         typedef HashMap<EncodedJSValue, unsigned, PtrHash<EncodedJSValue>, JSValueHashTraits> JSValueMap;
     357        typedef HashMap<EncodedJSValue, unsigned, EncodedJSValueHash, EncodedJSValueHashTraits> JSValueMap;
    359358
    360359        struct IdentifierMapIndexHashTraits {
  • trunk/JavaScriptCore/interpreter/CallFrame.h

    r45609 r46598  
    106106        Instruction* returnPC() const { return this[RegisterFile::ReturnPC].vPC(); }
    107107
    108         void setCalleeArguments(Arguments* arguments) { this[RegisterFile::OptionalCalleeArguments] = arguments; }
     108        void setCalleeArguments(JSValue arguments) { this[RegisterFile::OptionalCalleeArguments] = arguments; }
    109109        void setCallerFrame(CallFrame* callerFrame) { this[RegisterFile::CallerFrame] = callerFrame; }
    110110        void setScopeChain(ScopeChainNode* scopeChain) { this[RegisterFile::ScopeChain] = scopeChain; }
     
    119119            setCallerFrame(callerFrame);
    120120            this[RegisterFile::ReturnPC] = vPC; // This is either an Instruction* or a pointer into JIT generated code stored as an Instruction*.
    121             this[RegisterFile::ReturnValueRegister] = returnValueRegister;
     121            this[RegisterFile::ReturnValueRegister] = Register::withInt(returnValueRegister);
    122122            setArgumentCount(argc); // original argument count (for the sake of the "arguments" object)
    123123            setCallee(function);
    124             setCalleeArguments(0);
     124            setCalleeArguments(JSValue());
    125125        }
    126126
     
    136136
    137137    private:
    138         void setArgumentCount(int count) { this[RegisterFile::ArgumentCount] = count; }
     138        void setArgumentCount(int count) { this[RegisterFile::ArgumentCount] = Register::withInt(count); }
    139139        void setCallee(JSFunction* callee) { this[RegisterFile::Callee] = callee; }
    140140        void setCodeBlock(CodeBlock* codeBlock) { this[RegisterFile::CodeBlock] = codeBlock; }
  • trunk/JavaScriptCore/interpreter/CallFrameClosure.h

    r44030 r46598  
    5050    {
    5151        newCallFrame->setScopeChain(scopeChain);
    52         newCallFrame->setCalleeArguments(0);
     52        newCallFrame->setCalleeArguments(JSValue());
    5353        for (int i = providedParams; i < expectedParams; ++i)
    5454            newCallFrame[i - RegisterFile::CallFrameHeaderSize - expectedParams] = jsUndefined();
  • trunk/JavaScriptCore/interpreter/Interpreter.cpp

    r45903 r46598  
    381381{
    382382    printf("Register frame: \n\n");
    383     printf("----------------------------------------------------\n");
    384     printf("            use            |   address  |   value   \n");
    385     printf("----------------------------------------------------\n");
     383    printf("-----------------------------------------------------------------------------\n");
     384    printf("            use            |   address  |                value               \n");
     385    printf("-----------------------------------------------------------------------------\n");
    386386
    387387    CodeBlock* codeBlock = callFrame->codeBlock();
     
    389389    const Register* it;
    390390    const Register* end;
     391    JSValue v;
    391392
    392393    if (codeBlock->codeType() == GlobalCode) {
     
    394395        end = it + registerFile->numGlobals();
    395396        while (it != end) {
    396             printf("[global var]               | %10p | %10p \n", it, (*it).v());
     397            v = (*it).jsValue();
     398#if USE(JSVALUE32_64)
     399            printf("[global var]               | %10p | %-16s 0x%llx \n", it, v.description(), JSValue::encode(v));
     400#else
     401            printf("[global var]               | %10p | %-16s %p \n", it, v.description(), JSValue::encode(v));
     402#endif
    397403            ++it;
    398404        }
    399         printf("----------------------------------------------------\n");
     405        printf("-----------------------------------------------------------------------------\n");
    400406    }
    401407   
    402408    it = callFrame->registers() - RegisterFile::CallFrameHeaderSize - codeBlock->m_numParameters;
    403     printf("[this]                     | %10p | %10p \n", it, (*it).v()); ++it;
     409    v = (*it).jsValue();
     410#if USE(JSVALUE32_64)
     411    printf("[this]                     | %10p | %-16s 0x%llx \n", it, v.description(), JSValue::encode(v)); ++it;
     412#else
     413    printf("[this]                     | %10p | %-16s %p \n", it, v.description(), JSValue::encode(v)); ++it;
     414#endif
    404415    end = it + max(codeBlock->m_numParameters - 1, 0); // - 1 to skip "this"
    405416    if (it != end) {
    406417        do {
    407             printf("[param]                    | %10p | %10p \n", it, (*it).v());
     418            v = (*it).jsValue();
     419#if USE(JSVALUE32_64)
     420            printf("[param]                    | %10p | %-16s 0x%llx \n", it, v.description(), JSValue::encode(v));
     421#else
     422            printf("[param]                    | %10p | %-16s %p \n", it, v.description(), JSValue::encode(v));
     423#endif
    408424            ++it;
    409425        } while (it != end);
    410426    }
    411     printf("----------------------------------------------------\n");
    412 
    413     printf("[CodeBlock]                | %10p | %10p \n", it, (*it).v()); ++it;
    414     printf("[ScopeChain]               | %10p | %10p \n", it, (*it).v()); ++it;
    415     printf("[CallerRegisters]          | %10p | %10p \n", it, (*it).v()); ++it;
    416     printf("[ReturnPC]                 | %10p | %10p \n", it, (*it).v()); ++it;
    417     printf("[ReturnValueRegister]      | %10p | %10p \n", it, (*it).v()); ++it;
    418     printf("[ArgumentCount]            | %10p | %10p \n", it, (*it).v()); ++it;
    419     printf("[Callee]                   | %10p | %10p \n", it, (*it).v()); ++it;
    420     printf("[OptionalCalleeArguments]  | %10p | %10p \n", it, (*it).v()); ++it;
    421     printf("----------------------------------------------------\n");
     427    printf("-----------------------------------------------------------------------------\n");
     428    printf("[CodeBlock]                | %10p | %p \n", it, (*it).codeBlock()); ++it;
     429    printf("[ScopeChain]               | %10p | %p \n", it, (*it).scopeChain()); ++it;
     430    printf("[CallerRegisters]          | %10p | %d \n", it, (*it).i()); ++it;
     431    printf("[ReturnPC]                 | %10p | %p \n", it, (*it).vPC()); ++it;
     432    printf("[ReturnValueRegister]      | %10p | %d \n", it, (*it).i()); ++it;
     433    printf("[ArgumentCount]            | %10p | %d \n", it, (*it).i()); ++it;
     434    printf("[Callee]                   | %10p | %p \n", it, (*it).function()); ++it;
     435    printf("[OptionalCalleeArguments]  | %10p | %p \n", it, (*it).arguments()); ++it;
     436    printf("-----------------------------------------------------------------------------\n");
    422437
    423438    int registerCount = 0;
     
    426441    if (it != end) {
    427442        do {
    428             printf("[r%2d]                      | %10p | %10p \n", registerCount, it, (*it).v());
     443            v = (*it).jsValue();
     444#if USE(JSVALUE32_64)
     445            printf("[r%2d]                      | %10p | %-16s 0x%llx \n", registerCount, it, v.description(), JSValue::encode(v));
     446#else
     447            printf("[r%2d]                      | %10p | %-16s %p \n", registerCount, it, v.description(), JSValue::encode(v));
     448#endif
    429449            ++it;
    430450            ++registerCount;
    431451        } while (it != end);
    432452    }
    433     printf("----------------------------------------------------\n");
     453    printf("-----------------------------------------------------------------------------\n");
    434454
    435455    end = it + codeBlock->m_numCalleeRegisters - codeBlock->m_numVars;
    436456    if (it != end) {
    437457        do {
    438             printf("[r%2d]                      | %10p | %10p \n", registerCount, it, (*it).v());
     458            v = (*it).jsValue();
     459#if USE(JSVALUE32_64)
     460            printf("[r%2d]                      | %10p | %-16s 0x%llx \n", registerCount, it, v.description(), JSValue::encode(v));
     461#else
     462            printf("[r%2d]                      | %10p | %-16s %p \n", registerCount, it, v.description(), JSValue::encode(v));
     463#endif
    439464            ++it;
    440465            ++registerCount;
    441466        } while (it != end);
    442467    }
    443     printf("----------------------------------------------------\n");
     468    printf("-----------------------------------------------------------------------------\n");
    444469}
    445470
     
    11121137
    11131138#if ENABLE(JIT)
    1114     // Currently with CTI enabled we never interpret functions
     1139    // Mixing Interpreter + JIT is not supported.
    11151140    ASSERT_NOT_REACHED();
    11161141#endif
     
    12421267        JSValue src1 = callFrame->r((++vPC)->u.operand).jsValue();
    12431268        JSValue src2 = callFrame->r((++vPC)->u.operand).jsValue();
    1244         if (JSFastMath::canDoFastBitwiseOperations(src1, src2))
    1245             callFrame->r(dst) = JSFastMath::equal(src1, src2);
     1269        if (src1.isInt32() && src2.isInt32())
     1270            callFrame->r(dst) = jsBoolean(src1.asInt32() == src2.asInt32());
    12461271        else {
    12471272            JSValue result = jsBoolean(JSValue::equalSlowCase(callFrame, src1, src2));
     
    12821307        JSValue src1 = callFrame->r((++vPC)->u.operand).jsValue();
    12831308        JSValue src2 = callFrame->r((++vPC)->u.operand).jsValue();
    1284         if (JSFastMath::canDoFastBitwiseOperations(src1, src2))
    1285             callFrame->r(dst) = JSFastMath::notEqual(src1, src2);
     1309        if (src1.isInt32() && src2.isInt32())
     1310            callFrame->r(dst) = jsBoolean(src1.asInt32() != src2.asInt32());
    12861311        else {
    12871312            JSValue result = jsBoolean(!JSValue::equalSlowCase(callFrame, src1, src2));
     
    13841409        int srcDst = (++vPC)->u.operand;
    13851410        JSValue v = callFrame->r(srcDst).jsValue();
    1386         if (JSFastMath::canDoFastAdditiveOperations(v))
    1387             callFrame->r(srcDst) = JSValue(JSFastMath::incImmediateNumber(v));
     1411        if (v.isInt32() && v.asInt32() < INT_MAX)
     1412            callFrame->r(srcDst) = jsNumber(callFrame, v.asInt32() + 1);
    13881413        else {
    13891414            JSValue result = jsNumber(callFrame, v.toNumber(callFrame) + 1);
     
    14031428        int srcDst = (++vPC)->u.operand;
    14041429        JSValue v = callFrame->r(srcDst).jsValue();
    1405         if (JSFastMath::canDoFastAdditiveOperations(v))
    1406             callFrame->r(srcDst) = JSValue(JSFastMath::decImmediateNumber(v));
     1430        if (v.isInt32() && v.asInt32() > INT_MIN)
     1431            callFrame->r(srcDst) = jsNumber(callFrame, v.asInt32() - 1);
    14071432        else {
    14081433            JSValue result = jsNumber(callFrame, v.toNumber(callFrame) - 1);
     
    14241449        int srcDst = (++vPC)->u.operand;
    14251450        JSValue v = callFrame->r(srcDst).jsValue();
    1426         if (JSFastMath::canDoFastAdditiveOperations(v)) {
     1451        if (v.isInt32() && v.asInt32() < INT_MAX) {
     1452            callFrame->r(srcDst) = jsNumber(callFrame, v.asInt32() + 1);
    14271453            callFrame->r(dst) = v;
    1428             callFrame->r(srcDst) = JSValue(JSFastMath::incImmediateNumber(v));
    14291454        } else {
    14301455            JSValue number = callFrame->r(srcDst).jsValue().toJSNumber(callFrame);
    14311456            CHECK_FOR_EXCEPTION();
     1457            callFrame->r(srcDst) = jsNumber(callFrame, number.uncheckedGetNumber() + 1);
    14321458            callFrame->r(dst) = number;
    1433             callFrame->r(srcDst) = JSValue(jsNumber(callFrame, number.uncheckedGetNumber() + 1));
    14341459        }
    14351460
     
    14471472        int srcDst = (++vPC)->u.operand;
    14481473        JSValue v = callFrame->r(srcDst).jsValue();
    1449         if (JSFastMath::canDoFastAdditiveOperations(v)) {
     1474        if (v.isInt32() && v.asInt32() > INT_MIN) {
     1475            callFrame->r(srcDst) = jsNumber(callFrame, v.asInt32() - 1);
    14501476            callFrame->r(dst) = v;
    1451             callFrame->r(srcDst) = JSValue(JSFastMath::decImmediateNumber(v));
    14521477        } else {
    14531478            JSValue number = callFrame->r(srcDst).jsValue().toJSNumber(callFrame);
    14541479            CHECK_FOR_EXCEPTION();
     1480            callFrame->r(srcDst) = jsNumber(callFrame, number.uncheckedGetNumber() - 1);
    14551481            callFrame->r(dst) = number;
    1456             callFrame->r(srcDst) = JSValue(jsNumber(callFrame, number.uncheckedGetNumber() - 1));
    14571482        }
    14581483
     
    14901515        int dst = (++vPC)->u.operand;
    14911516        JSValue src = callFrame->r((++vPC)->u.operand).jsValue();
    1492         ++vPC;
    1493         double v;
    1494         if (src.getNumber(v))
    1495             callFrame->r(dst) = JSValue(jsNumber(callFrame, -v));
     1517        if (src.isInt32() && src.asInt32())
     1518            callFrame->r(dst) = jsNumber(callFrame, -src.asInt32());
    14961519        else {
    14971520            JSValue result = jsNumber(callFrame, -src.toNumber(callFrame));
     
    15001523        }
    15011524
     1525        ++vPC;
    15021526        NEXT_INSTRUCTION();
    15031527    }
     
    15121536        JSValue src1 = callFrame->r((++vPC)->u.operand).jsValue();
    15131537        JSValue src2 = callFrame->r((++vPC)->u.operand).jsValue();
    1514         if (JSFastMath::canDoFastAdditiveOperations(src1, src2))
    1515             callFrame->r(dst) = JSValue(JSFastMath::addImmediateNumbers(src1, src2));
     1538        if (src1.isInt32() && src2.isInt32() && !(src1.asInt32() | src2.asInt32() & 0xc0000000)) // no overflow
     1539            callFrame->r(dst) = jsNumber(callFrame, src1.asInt32() + src2.asInt32());
    15161540        else {
    15171541            JSValue result = jsAdd(callFrame, src1, src2);
     
    15311555        JSValue src1 = callFrame->r((++vPC)->u.operand).jsValue();
    15321556        JSValue src2 = callFrame->r((++vPC)->u.operand).jsValue();
    1533         double left;
    1534         double right;
    1535         if (JSValue::areBothInt32Fast(src1, src2)) {
    1536             int32_t left = src1.getInt32Fast();
    1537             int32_t right = src2.getInt32Fast();
    1538             if ((left | right) >> 15 == 0)
    1539                 callFrame->r(dst) = JSValue(jsNumber(callFrame, left * right));
    1540             else
    1541                 callFrame->r(dst) = JSValue(jsNumber(callFrame, static_cast<double>(left) * static_cast<double>(right)));
    1542         } else if (src1.getNumber(left) && src2.getNumber(right))
    1543             callFrame->r(dst) = JSValue(jsNumber(callFrame, left * right));
     1557        if (src1.isInt32() && src2.isInt32() && !(src1.asInt32() | src2.asInt32() >> 15)) // no overflow
     1558                callFrame->r(dst) = jsNumber(callFrame, src1.asInt32() * src2.asInt32());
    15441559        else {
    15451560            JSValue result = jsNumber(callFrame, src1.toNumber(callFrame) * src2.toNumber(callFrame));
     
    15611576        JSValue dividend = callFrame->r((++vPC)->u.operand).jsValue();
    15621577        JSValue divisor = callFrame->r((++vPC)->u.operand).jsValue();
    1563         double left;
    1564         double right;
    1565         if (dividend.getNumber(left) && divisor.getNumber(right))
    1566             callFrame->r(dst) = JSValue(jsNumber(callFrame, left / right));
    1567         else {
    1568             JSValue result = jsNumber(callFrame, dividend.toNumber(callFrame) / divisor.toNumber(callFrame));
    1569             CHECK_FOR_EXCEPTION();
    1570             callFrame->r(dst) = result;
    1571         }
    1572         ++vPC;
     1578
     1579        JSValue result = jsNumber(callFrame, dividend.toNumber(callFrame) / divisor.toNumber(callFrame));
     1580        CHECK_FOR_EXCEPTION();
     1581        callFrame->r(dst) = result;
     1582
     1583        vPC += 2;
    15731584        NEXT_INSTRUCTION();
    15741585    }
     
    15811592        */
    15821593        int dst = (++vPC)->u.operand;
    1583         int dividend = (++vPC)->u.operand;
    1584         int divisor = (++vPC)->u.operand;
    1585 
    1586         JSValue dividendValue = callFrame->r(dividend).jsValue();
    1587         JSValue divisorValue = callFrame->r(divisor).jsValue();
    1588 
    1589         if (JSValue::areBothInt32Fast(dividendValue, divisorValue) && divisorValue != jsNumber(callFrame, 0)) {
    1590             // We expect the result of the modulus of a number that was representable as an int32 to also be representable
    1591             // as an int32.
    1592             JSValue result = JSValue::makeInt32Fast(dividendValue.getInt32Fast() % divisorValue.getInt32Fast());
     1594        JSValue dividend = callFrame->r((++vPC)->u.operand).jsValue();
     1595        JSValue divisor = callFrame->r((++vPC)->u.operand).jsValue();
     1596
     1597        if (dividend.isInt32() && divisor.isInt32() && divisor.asInt32() != 0) {
     1598            JSValue result = jsNumber(callFrame, dividend.asInt32() % divisor.asInt32());
    15931599            ASSERT(result);
    15941600            callFrame->r(dst) = result;
     
    15971603        }
    15981604
    1599         double d = dividendValue.toNumber(callFrame);
    1600         JSValue result = jsNumber(callFrame, fmod(d, divisorValue.toNumber(callFrame)));
     1605        // Conversion to double must happen outside the call to fmod since the
     1606        // order of argument evaluation is not guaranteed.
     1607        double d1 = dividend.toNumber(callFrame);
     1608        double d2 = divisor.toNumber(callFrame);
     1609        JSValue result = jsNumber(callFrame, fmod(d1, d2));
    16011610        CHECK_FOR_EXCEPTION();
    16021611        callFrame->r(dst) = result;
     
    16141623        JSValue src1 = callFrame->r((++vPC)->u.operand).jsValue();
    16151624        JSValue src2 = callFrame->r((++vPC)->u.operand).jsValue();
    1616         double left;
    1617         double right;
    1618         if (JSFastMath::canDoFastAdditiveOperations(src1, src2))
    1619             callFrame->r(dst) = JSValue(JSFastMath::subImmediateNumbers(src1, src2));
    1620         else if (src1.getNumber(left) && src2.getNumber(right))
    1621             callFrame->r(dst) = JSValue(jsNumber(callFrame, left - right));
     1625        if (src1.isInt32() && src2.isInt32() && !(src1.asInt32() | src2.asInt32() & 0xc0000000)) // no overflow
     1626            callFrame->r(dst) = jsNumber(callFrame, src1.asInt32() - src2.asInt32());
    16221627        else {
    16231628            JSValue result = jsNumber(callFrame, src1.toNumber(callFrame) - src2.toNumber(callFrame));
     
    16381643        JSValue val = callFrame->r((++vPC)->u.operand).jsValue();
    16391644        JSValue shift = callFrame->r((++vPC)->u.operand).jsValue();
    1640         int32_t left;
    1641         uint32_t right;
    1642         if (JSValue::areBothInt32Fast(val, shift))
    1643             callFrame->r(dst) = JSValue(jsNumber(callFrame, val.getInt32Fast() << (shift.getInt32Fast() & 0x1f)));
    1644         else if (val.numberToInt32(left) && shift.numberToUInt32(right))
    1645             callFrame->r(dst) = JSValue(jsNumber(callFrame, left << (right & 0x1f)));
     1645
     1646        if (val.isInt32() && shift.isInt32())
     1647            callFrame->r(dst) = jsNumber(callFrame, val.asInt32() << (shift.asInt32() & 0x1f));
    16461648        else {
    16471649            JSValue result = jsNumber(callFrame, (val.toInt32(callFrame)) << (shift.toUInt32(callFrame) & 0x1f));
     
    16631665        JSValue val = callFrame->r((++vPC)->u.operand).jsValue();
    16641666        JSValue shift = callFrame->r((++vPC)->u.operand).jsValue();
    1665         int32_t left;
    1666         uint32_t right;
    1667         if (JSFastMath::canDoFastRshift(val, shift))
    1668             callFrame->r(dst) = JSValue(JSFastMath::rightShiftImmediateNumbers(val, shift));
    1669         else if (val.numberToInt32(left) && shift.numberToUInt32(right))
    1670             callFrame->r(dst) = JSValue(jsNumber(callFrame, left >> (right & 0x1f)));
     1667
     1668        if (val.isInt32() && shift.isInt32())
     1669            callFrame->r(dst) = jsNumber(callFrame, val.asInt32() >> (shift.asInt32() & 0x1f));
    16711670        else {
    16721671            JSValue result = jsNumber(callFrame, (val.toInt32(callFrame)) >> (shift.toUInt32(callFrame) & 0x1f));
     
    16881687        JSValue val = callFrame->r((++vPC)->u.operand).jsValue();
    16891688        JSValue shift = callFrame->r((++vPC)->u.operand).jsValue();
    1690         if (JSFastMath::canDoFastUrshift(val, shift))
    1691             callFrame->r(dst) = JSValue(JSFastMath::rightShiftImmediateNumbers(val, shift));
     1689        if (val.isUInt32() && shift.isInt32())
     1690            callFrame->r(dst) = jsNumber(callFrame, val.asInt32() >> (shift.asInt32() & 0x1f));
    16921691        else {
    16931692            JSValue result = jsNumber(callFrame, (val.toUInt32(callFrame)) >> (shift.toUInt32(callFrame) & 0x1f));
     
    17091708        JSValue src1 = callFrame->r((++vPC)->u.operand).jsValue();
    17101709        JSValue src2 = callFrame->r((++vPC)->u.operand).jsValue();
    1711         int32_t left;
    1712         int32_t right;
    1713         if (JSFastMath::canDoFastBitwiseOperations(src1, src2))
    1714             callFrame->r(dst) = JSValue(JSFastMath::andImmediateNumbers(src1, src2));
    1715         else if (src1.numberToInt32(left) && src2.numberToInt32(right))
    1716             callFrame->r(dst) = JSValue(jsNumber(callFrame, left & right));
     1710        if (src1.isInt32() && src2.isInt32())
     1711            callFrame->r(dst) = jsNumber(callFrame, src1.asInt32() & src2.asInt32());
    17171712        else {
    17181713            JSValue result = jsNumber(callFrame, src1.toInt32(callFrame) & src2.toInt32(callFrame));
     
    17341729        JSValue src1 = callFrame->r((++vPC)->u.operand).jsValue();
    17351730        JSValue src2 = callFrame->r((++vPC)->u.operand).jsValue();
    1736         int32_t left;
    1737         int32_t right;
    1738         if (JSFastMath::canDoFastBitwiseOperations(src1, src2))
    1739             callFrame->r(dst) = JSValue(JSFastMath::xorImmediateNumbers(src1, src2));
    1740         else if (src1.numberToInt32(left) && src2.numberToInt32(right))
    1741             callFrame->r(dst) = JSValue(jsNumber(callFrame, left ^ right));
     1731        if (src1.isInt32() && src2.isInt32())
     1732            callFrame->r(dst) = jsNumber(callFrame, src1.asInt32() ^ src2.asInt32());
    17421733        else {
    17431734            JSValue result = jsNumber(callFrame, src1.toInt32(callFrame) ^ src2.toInt32(callFrame));
     
    17591750        JSValue src1 = callFrame->r((++vPC)->u.operand).jsValue();
    17601751        JSValue src2 = callFrame->r((++vPC)->u.operand).jsValue();
    1761         int32_t left;
    1762         int32_t right;
    1763         if (JSFastMath::canDoFastBitwiseOperations(src1, src2))
    1764             callFrame->r(dst) = JSValue(JSFastMath::orImmediateNumbers(src1, src2));
    1765         else if (src1.numberToInt32(left) && src2.numberToInt32(right))
    1766             callFrame->r(dst) = JSValue(jsNumber(callFrame, left | right));
     1752        if (src1.isInt32() && src2.isInt32())
     1753            callFrame->r(dst) = jsNumber(callFrame, src1.asInt32() | src2.asInt32());
    17671754        else {
    17681755            JSValue result = jsNumber(callFrame, src1.toInt32(callFrame) | src2.toInt32(callFrame));
     
    17821769        int dst = (++vPC)->u.operand;
    17831770        JSValue src = callFrame->r((++vPC)->u.operand).jsValue();
    1784         int32_t value;
    1785         if (src.numberToInt32(value))
    1786             callFrame->r(dst) = JSValue(jsNumber(callFrame, ~value));
     1771        if (src.isInt32())
     1772            callFrame->r(dst) = jsNumber(callFrame, ~src.asInt32());
    17871773        else {
    17881774            JSValue result = jsNumber(callFrame, ~src.toInt32(callFrame));
     
    21182104        NEXT_INSTRUCTION();
    21192105    }
    2120     DEFINE_OPCODE(op_resolve_func) {
    2121         /* resolve_func baseDst(r) funcDst(r) property(id)
    2122 
    2123            Searches the scope chain for an object containing
    2124            identifier property, and if one is found, writes the
    2125            appropriate object to use as "this" when calling its
    2126            properties to register baseDst; and the retrieved property
    2127            value to register propDst. If the property is not found,
    2128            raises an exception.
    2129 
    2130            This differs from resolve_with_base, because the
    2131            global this value will be substituted for activations or
    2132            the global object, which is the right behavior for function
    2133            calls but not for other property lookup.
    2134         */
    2135         if (UNLIKELY(!resolveBaseAndFunc(callFrame, vPC, exceptionValue)))
    2136             goto vm_throw;
    2137 
    2138         vPC += 4;
    2139         NEXT_INSTRUCTION();
    2140     }
    21412106    DEFINE_OPCODE(op_get_by_id) {
    21422107        /* get_by_id dst(r) base(r) property(id) structure(sID) nop(n) nop(n) nop(n)
     
    23192284        if (LIKELY(isJSArray(globalData, baseValue))) {
    23202285            int dst = vPC[1].u.operand;
    2321             callFrame->r(dst) = JSValue(jsNumber(callFrame, asArray(baseValue)->length()));
     2286            callFrame->r(dst) = jsNumber(callFrame, asArray(baseValue)->length());
    23222287            vPC += 8;
    23232288            NEXT_INSTRUCTION();
     
    23392304        if (LIKELY(isJSString(globalData, baseValue))) {
    23402305            int dst = vPC[1].u.operand;
    2341             callFrame->r(dst) = JSValue(jsNumber(callFrame, asString(baseValue)->value().size()));
     2306            callFrame->r(dst) = jsNumber(callFrame, asString(baseValue)->value().size());
    23422307            vPC += 8;
    23432308            NEXT_INSTRUCTION();
     
    25172482        JSValue result;
    25182483
    2519         if (LIKELY(subscript.isUInt32Fast())) {
    2520             uint32_t i = subscript.getUInt32Fast();
     2484        if (LIKELY(subscript.isUInt32())) {
     2485            uint32_t i = subscript.asUInt32();
    25212486            if (isJSArray(globalData, baseValue)) {
    25222487                JSArray* jsArray = asArray(baseValue);
     
    25592524        JSValue subscript = callFrame->r(property).jsValue();
    25602525
    2561         if (LIKELY(subscript.isUInt32Fast())) {
    2562             uint32_t i = subscript.getUInt32Fast();
     2526        if (LIKELY(subscript.isUInt32())) {
     2527            uint32_t i = subscript.asUInt32();
    25632528            if (isJSArray(globalData, baseValue)) {
    25642529                JSArray* jsArray = asArray(baseValue);
     
    25712536                double dValue = 0;
    25722537                JSValue jsValue = callFrame->r(value).jsValue();
    2573                 if (jsValue.isInt32Fast())
    2574                     jsByteArray->setIndex(i, jsValue.getInt32Fast());
     2538                if (jsValue.isInt32())
     2539                    jsByteArray->setIndex(i, jsValue.asInt32());
    25752540                else if (jsValue.getNumber(dValue))
    25762541                    jsByteArray->setIndex(i, dValue);
     
    28922857        int defaultOffset = (++vPC)->u.operand;
    28932858        JSValue scrutinee = callFrame->r((++vPC)->u.operand).jsValue();
    2894         if (scrutinee.isInt32Fast())
    2895             vPC += callFrame->codeBlock()->immediateSwitchJumpTable(tableIndex).offsetForValue(scrutinee.getInt32Fast(), defaultOffset);
     2859        if (scrutinee.isInt32())
     2860            vPC += callFrame->codeBlock()->immediateSwitchJumpTable(tableIndex).offsetForValue(scrutinee.asInt32(), defaultOffset);
    28962861        else {
    28972862            double value;
     
    29562921        int func = (++vPC)->u.operand;
    29572922
    2958         callFrame->r(dst) = callFrame->codeBlock()->function(func)->makeFunction(callFrame, callFrame->scopeChain());
     2923        callFrame->r(dst) = JSValue(callFrame->codeBlock()->function(func)->makeFunction(callFrame, callFrame->scopeChain()));
    29592924
    29602925        ++vPC;
     
    29722937        int func = (++vPC)->u.operand;
    29732938
    2974         callFrame->r(dst) = callFrame->codeBlock()->functionExpression(func)->makeFunction(callFrame, callFrame->scopeChain());
     2939        callFrame->r(dst) = JSValue(callFrame->codeBlock()->functionExpression(func)->makeFunction(callFrame, callFrame->scopeChain()));
    29752940
    29762941        ++vPC;
     
    30803045            CHECK_FOR_EXCEPTION();
    30813046
    3082             callFrame->r(dst) = JSValue(returnValue);
     3047            callFrame->r(dst) = returnValue;
    30833048
    30843049            vPC += 5;
     
    31633128        }
    31643129        CHECK_FOR_EXCEPTION();
    3165         callFrame->r(argCountDst) = argCount + 1;
     3130        callFrame->r(argCountDst) = Register::withInt(argCount + 1);
    31663131        ++vPC;
    31673132        NEXT_INSTRUCTION();
     
    32343199            CHECK_FOR_EXCEPTION();
    32353200           
    3236             callFrame->r(dst) = JSValue(returnValue);
     3201            callFrame->r(dst) = returnValue;
    32373202           
    32383203            vPC += 5;
     
    32803245
    32813246        ASSERT(callFrame->codeBlock()->usesArguments() && !callFrame->codeBlock()->needsFullScopeChain());
     3247
    32823248        if (callFrame->optionalCalleeArguments())
    32833249            callFrame->optionalCalleeArguments()->copyRegisters();
     
    33103276            return returnValue;
    33113277
    3312         callFrame->r(dst) = JSValue(returnValue);
     3278        callFrame->r(dst) = returnValue;
    33133279
    33143280        NEXT_INSTRUCTION();
     
    33553321        int dst = (++vPC)->u.operand;
    33563322        JSActivation* activation = new (globalData) JSActivation(callFrame, static_cast<FunctionBodyNode*>(codeBlock->ownerNode()));
    3357         callFrame->r(dst) = activation;
     3323        callFrame->r(dst) = JSValue(activation);
    33583324        callFrame->setScopeChain(callFrame->scopeChain()->copy()->push(activation));
    33593325
     
    34063372             Arguments* arguments = new (globalData) Arguments(callFrame);
    34073373             callFrame->setCalleeArguments(arguments);
    3408              callFrame->r(RegisterFile::ArgumentsRegister) = arguments;
     3374             callFrame->r(RegisterFile::ArgumentsRegister) = JSValue(arguments);
    34093375         }
    34103376        ++vPC;
     
    38683834        SymbolTable& symbolTable = codeBlock->symbolTable();
    38693835        int argumentsIndex = symbolTable.get(functionCallFrame->propertyNames().arguments.ustring().rep()).getIndex();
    3870         if (!functionCallFrame->r(argumentsIndex).arguments()) {
     3836        if (!functionCallFrame->r(argumentsIndex).jsValue()) {
    38713837            Arguments* arguments = new (callFrame) Arguments(functionCallFrame);
    38723838            functionCallFrame->setCalleeArguments(arguments);
    3873             functionCallFrame->r(RegisterFile::ArgumentsRegister) = arguments;
     3839            functionCallFrame->r(RegisterFile::ArgumentsRegister) = JSValue(arguments);
    38743840        }
    38753841        return functionCallFrame->r(argumentsIndex).jsValue();
  • trunk/JavaScriptCore/interpreter/Register.h

    r45128 r46598  
    5353        Register();
    5454        Register(JSValue);
    55         Register(Arguments*);
    5655
    5756        JSValue jsValue() const;
     
    6059        void mark();
    6160       
    62         int32_t i() const;
    63         void* v() const;
    64 
    65     private:
    66         friend class ExecState;
    67         friend class Interpreter;
    68 
    69         // Only CallFrame, Interpreter, and JITStubs should use these functions.
    70 
    71         Register(intptr_t);
    72 
    7361        Register(JSActivation*);
    7462        Register(CallFrame*);
     
    7967        Register(Instruction*);
    8068
     69        int32_t i() const;
    8170        JSActivation* activation() const;
    8271        Arguments* arguments() const;
     
    8877        Instruction* vPC() const;
    8978
     79        static Register withInt(int32_t i)
     80        {
     81            return Register(i);
     82        }
     83
     84    private:
     85        Register(int32_t);
     86
    9087        union {
    91             intptr_t i;
    92             void* v;
     88            int32_t i;
    9389            EncodedJSValue value;
    9490
    9591            JSActivation* activation;
    96             Arguments* arguments;
    9792            CallFrame* callFrame;
    9893            CodeBlock* codeBlock;
     
    133128    // Interpreter functions
    134129
    135     ALWAYS_INLINE Register::Register(Arguments* arguments)
    136     {
    137         u.arguments = arguments;
    138     }
    139 
    140130    ALWAYS_INLINE Register::Register(JSActivation* activation)
    141131    {
     
    173163    }
    174164
    175     ALWAYS_INLINE Register::Register(intptr_t i)
    176     {
    177         // See comment on 'i()' below.
    178         ASSERT(i == static_cast<int32_t>(i));
     165    ALWAYS_INLINE Register::Register(int32_t i)
     166    {
    179167        u.i = i;
    180168    }
    181169
    182     // Read 'i' as a 32-bit integer; we only use it to hold 32-bit values,
    183     // and we only write 32-bits when writing the arg count from JIT code.
    184170    ALWAYS_INLINE int32_t Register::i() const
    185171    {
    186         return static_cast<int32_t>(u.i);
    187     }
    188    
    189     ALWAYS_INLINE void* Register::v() const
    190     {
    191         return u.v;
    192     }
    193 
     172        return u.i;
     173    }
     174   
    194175    ALWAYS_INLINE JSActivation* Register::activation() const
    195176    {
     
    197178    }
    198179   
    199     ALWAYS_INLINE Arguments* Register::arguments() const
    200     {
    201         return u.arguments;
    202     }
    203    
    204180    ALWAYS_INLINE CallFrame* Register::callFrame() const
    205181    {
  • trunk/JavaScriptCore/jit/JIT.cpp

    r46438 r46598  
    3838#include "Interpreter.h"
    3939#include "JITInlineMethods.h"
     40#include "JITStubs.h"
    4041#include "JITStubCall.h"
    4142#include "JSArray.h"
     
    8081    , m_callStructureStubCompilationInfo(codeBlock ? codeBlock->numberOfCallLinkInfos() : 0)
    8182    , m_bytecodeIndex((unsigned)-1)
     83#if USE(JSVALUE32_64)
     84    , m_jumpTargetIndex(0)
     85    , m_mappedBytecodeIndex((unsigned)-1)
     86    , m_mappedVirtualRegisterIndex((unsigned)-1)
     87    , m_mappedTag((RegisterID)-1)
     88    , m_mappedPayload((RegisterID)-1)
     89#else
    8290    , m_lastResultBytecodeRegister(std::numeric_limits<int>::max())
    8391    , m_jumpTargetsPosition(0)
    84 {
    85 }
    86 
    87 void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
    88 {
    89     unsigned dst = currentInstruction[1].u.operand;
    90     unsigned src1 = currentInstruction[2].u.operand;
    91     unsigned src2 = currentInstruction[3].u.operand;
    92 
    93     emitGetVirtualRegisters(src1, regT0, src2, regT1);
    94 
    95     // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    96     move(regT0, regT2);
    97     orPtr(regT1, regT2);
    98     addSlowCase(emitJumpIfJSCell(regT2));
    99     addSlowCase(emitJumpIfImmediateNumber(regT2));
    100 
    101     if (type == OpStrictEq)
    102         set32(Equal, regT1, regT0, regT0);
    103     else
    104         set32(NotEqual, regT1, regT0, regT0);
    105     emitTagAsBoolImmediate(regT0);
    106 
    107     emitPutVirtualRegister(dst);
    108 }
    109 
     92#endif
     93{
     94}
     95
     96#if USE(JSVALUE32_64)
    11097void JIT::emitTimeoutCheck()
    11198{
    11299    Jump skipTimeout = branchSub32(NonZero, Imm32(1), timeoutCheckRegister);
    113     JITStubCall(this, JITStubs::cti_timeout_check).call(timeoutCheckRegister);
     100    JITStubCall stubCall(this, cti_timeout_check);
     101    stubCall.addArgument(regT1, regT0); // save last result registers.
     102    stubCall.call(timeoutCheckRegister);
     103    stubCall.getArgument(0, regT1, regT0); // reload last result registers.
    114104    skipTimeout.link(this);
     105}
     106#else
     107void JIT::emitTimeoutCheck()
     108{
     109    Jump skipTimeout = branchSub32(NonZero, Imm32(1), timeoutCheckRegister);
     110    JITStubCall(this, cti_timeout_check).call(timeoutCheckRegister);
     111    skipTimeout.link(this);
    115112
    116113    killLastResultRegister();
    117114}
    118 
     115#endif
    119116
    120117#define NEXT_OPCODE(name) \
     
    122119    break;
    123120
     121#if USE(JSVALUE32_64)
    124122#define DEFINE_BINARY_OP(name) \
    125123    case name: { \
    126         JITStubCall stubCall(this, JITStubs::cti_##name); \
     124        JITStubCall stubCall(this, cti_##name); \
     125        stubCall.addArgument(currentInstruction[2].u.operand); \
     126        stubCall.addArgument(currentInstruction[3].u.operand); \
     127        stubCall.call(currentInstruction[1].u.operand); \
     128        NEXT_OPCODE(name); \
     129    }
     130
     131#define DEFINE_UNARY_OP(name) \
     132    case name: { \
     133        JITStubCall stubCall(this, cti_##name); \
     134        stubCall.addArgument(currentInstruction[2].u.operand); \
     135        stubCall.call(currentInstruction[1].u.operand); \
     136        NEXT_OPCODE(name); \
     137    }
     138
     139#else // USE(JSVALUE32_64)
     140
     141#define DEFINE_BINARY_OP(name) \
     142    case name: { \
     143        JITStubCall stubCall(this, cti_##name); \
    127144        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
    128145        stubCall.addArgument(currentInstruction[3].u.operand, regT2); \
     
    133150#define DEFINE_UNARY_OP(name) \
    134151    case name: { \
    135         JITStubCall stubCall(this, JITStubs::cti_##name); \
     152        JITStubCall stubCall(this, cti_##name); \
    136153        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
    137154        stubCall.call(currentInstruction[1].u.operand); \
    138155        NEXT_OPCODE(name); \
    139156    }
     157#endif // USE(JSVALUE32_64)
    140158
    141159#define DEFINE_OP(name) \
     
    169187#endif
    170188
     189#if !USE(JSVALUE32_64)
    171190        if (m_labels[m_bytecodeIndex].isUsed())
    172191            killLastResultRegister();
    173        
     192#endif
     193
    174194        m_labels[m_bytecodeIndex] = label();
    175195
    176196        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
    177197        DEFINE_BINARY_OP(op_del_by_val)
     198#if !USE(JSVALUE32_64)
    178199        DEFINE_BINARY_OP(op_div)
     200#endif
    179201        DEFINE_BINARY_OP(op_in)
    180202        DEFINE_BINARY_OP(op_less)
     
    188210        DEFINE_UNARY_OP(op_is_string)
    189211        DEFINE_UNARY_OP(op_is_undefined)
     212#if !USE(JSVALUE32_64)
    190213        DEFINE_UNARY_OP(op_negate)
     214#endif
    191215        DEFINE_UNARY_OP(op_typeof)
    192216
     
    207231        DEFINE_OP(op_debug)
    208232        DEFINE_OP(op_del_by_id)
     233#if USE(JSVALUE32_64)
     234        DEFINE_OP(op_div)
     235#endif
    209236        DEFINE_OP(op_end)
    210237        DEFINE_OP(op_enter)
     
    237264        DEFINE_OP(op_mov)
    238265        DEFINE_OP(op_mul)
     266#if USE(JSVALUE32_64)
     267        DEFINE_OP(op_negate)
     268#endif
    239269        DEFINE_OP(op_neq)
    240270        DEFINE_OP(op_neq_null)
     
    266296        DEFINE_OP(op_resolve)
    267297        DEFINE_OP(op_resolve_base)
    268         DEFINE_OP(op_resolve_func)
    269298        DEFINE_OP(op_resolve_global)
    270299        DEFINE_OP(op_resolve_skip)
     
    323352
    324353    m_propertyAccessInstructionIndex = 0;
     354#if USE(JSVALUE32_64)
     355    m_globalResolveInfoIndex = 0;
     356#endif
    325357    m_callLinkInfoIndex = 0;
    326358
    327359    for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
    328         // FIXME: enable peephole optimizations for slow cases when applicable
     360#if !USE(JSVALUE32_64)
    329361        killLastResultRegister();
     362#endif
    330363
    331364        m_bytecodeIndex = iter->to;
     
    347380        DEFINE_SLOWCASE_OP(op_construct_verify)
    348381        DEFINE_SLOWCASE_OP(op_convert_this)
     382#if USE(JSVALUE32_64)
     383        DEFINE_SLOWCASE_OP(op_div)
     384#endif
    349385        DEFINE_SLOWCASE_OP(op_eq)
    350386        DEFINE_SLOWCASE_OP(op_get_by_id)
     
    359395        DEFINE_SLOWCASE_OP(op_loop_if_true)
    360396        DEFINE_SLOWCASE_OP(op_lshift)
     397        DEFINE_SLOWCASE_OP(op_method_check)
    361398        DEFINE_SLOWCASE_OP(op_mod)
    362399        DEFINE_SLOWCASE_OP(op_mul)
    363         DEFINE_SLOWCASE_OP(op_method_check)
     400#if USE(JSVALUE32_64)
     401        DEFINE_SLOWCASE_OP(op_negate)
     402#endif
    364403        DEFINE_SLOWCASE_OP(op_neq)
    365404        DEFINE_SLOWCASE_OP(op_not)
     
    371410        DEFINE_SLOWCASE_OP(op_put_by_id)
    372411        DEFINE_SLOWCASE_OP(op_put_by_val)
     412#if USE(JSVALUE32_64)
     413        DEFINE_SLOWCASE_OP(op_resolve_global)
     414#endif
    373415        DEFINE_SLOWCASE_OP(op_rshift)
    374416        DEFINE_SLOWCASE_OP(op_stricteq)
     
    428470        slowRegisterFileCheck.link(this);
    429471        m_bytecodeIndex = 0;
    430         JITStubCall(this, JITStubs::cti_register_file_check).call();
     472        JITStubCall(this, cti_register_file_check).call();
    431473#ifndef NDEBUG
    432474        m_bytecodeIndex = (unsigned)-1; // Reset this, in order to guard its use with ASSERTs.
     
    514556}
    515557
    516 void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiArrayLengthTrampoline, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallPreLink, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
    517 {
    518 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    519     // (1) The first function provides fast property access for array length
    520     Label arrayLengthBegin = align();
    521 
    522     // Check eax is an array
    523     Jump array_failureCases1 = emitJumpIfNotJSCell(regT0);
    524     Jump array_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr));
    525 
    526     // Checks out okay! - get the length from the storage
    527     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT0);
    528     load32(Address(regT0, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT0);
    529 
    530     Jump array_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));
    531 
    532     // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
    533     emitFastArithIntToImmNoCheck(regT0, regT0);
    534 
    535     ret();
    536 
    537     // (2) The second function provides fast property access for string length
    538     Label stringLengthBegin = align();
    539 
    540     // Check eax is a string
    541     Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    542     Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
    543 
    544     // Checks out okay! - get the length from the Ustring.
    545     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSString, m_value) + OBJECT_OFFSETOF(UString, m_rep)), regT0);
    546     load32(Address(regT0, OBJECT_OFFSETOF(UString::Rep, len)), regT0);
    547 
    548     Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));
    549 
    550     // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
    551     emitFastArithIntToImmNoCheck(regT0, regT0);
    552    
    553     ret();
    554 #endif
    555 
    556     // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    557     COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);
    558 
    559     Label virtualCallPreLinkBegin = align();
    560 
    561     // Load the callee CodeBlock* into eax
    562     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
    563     loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
    564     Jump hasCodeBlock1 = branchTestPtr(NonZero, regT0);
    565     preserveReturnAddressAfterCall(regT3);
    566     restoreArgumentReference();
    567     Call callJSFunction1 = call();
    568     emitGetJITStubArg(1, regT2);
    569     emitGetJITStubArg(3, regT1);
    570     restoreReturnAddressBeforeReturn(regT3);
    571     hasCodeBlock1.link(this);
    572 
    573     Jump isNativeFunc1 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));
    574 
    575     // Check argCount matches callee arity.
    576     Jump arityCheckOkay1 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
    577     preserveReturnAddressAfterCall(regT3);
    578     emitPutJITStubArg(regT3, 2);
    579     emitPutJITStubArg(regT0, 4);
    580     restoreArgumentReference();
    581     Call callArityCheck1 = call();
    582     move(regT1, callFrameRegister);
    583     emitGetJITStubArg(1, regT2);
    584     emitGetJITStubArg(3, regT1);
    585     restoreReturnAddressBeforeReturn(regT3);
    586     arityCheckOkay1.link(this);
    587     isNativeFunc1.link(this);
    588    
    589     compileOpCallInitializeCallFrame();
    590 
    591     preserveReturnAddressAfterCall(regT3);
    592     emitPutJITStubArg(regT3, 2);
    593     restoreArgumentReference();
    594     Call callDontLazyLinkCall = call();
    595     emitGetJITStubArg(1, regT2);
    596     restoreReturnAddressBeforeReturn(regT3);
    597 
    598     jump(regT0);
    599 
    600     Label virtualCallLinkBegin = align();
    601 
    602     // Load the callee CodeBlock* into eax
    603     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
    604     loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
    605     Jump hasCodeBlock2 = branchTestPtr(NonZero, regT0);
    606     preserveReturnAddressAfterCall(regT3);
    607     restoreArgumentReference();
    608     Call callJSFunction2 = call();
    609     emitGetJITStubArg(1, regT2);
    610     emitGetJITStubArg(3, regT1);
    611     restoreReturnAddressBeforeReturn(regT3);
    612     hasCodeBlock2.link(this);
    613 
    614     Jump isNativeFunc2 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));
    615 
    616     // Check argCount matches callee arity.
    617     Jump arityCheckOkay2 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
    618     preserveReturnAddressAfterCall(regT3);
    619     emitPutJITStubArg(regT3, 2);
    620     emitPutJITStubArg(regT0, 4);
    621     restoreArgumentReference();
    622     Call callArityCheck2 = call();
    623     move(regT1, callFrameRegister);
    624     emitGetJITStubArg(1, regT2);
    625     emitGetJITStubArg(3, regT1);
    626     restoreReturnAddressBeforeReturn(regT3);
    627     arityCheckOkay2.link(this);
    628     isNativeFunc2.link(this);
    629 
    630     compileOpCallInitializeCallFrame();
    631 
    632     preserveReturnAddressAfterCall(regT3);
    633     emitPutJITStubArg(regT3, 2);
    634     restoreArgumentReference();
    635     Call callLazyLinkCall = call();
    636     restoreReturnAddressBeforeReturn(regT3);
    637 
    638     jump(regT0);
    639 
    640     Label virtualCallBegin = align();
    641 
    642     // Load the callee CodeBlock* into eax
    643     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
    644     loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
    645     Jump hasCodeBlock3 = branchTestPtr(NonZero, regT0);
    646     preserveReturnAddressAfterCall(regT3);
    647     restoreArgumentReference();
    648     Call callJSFunction3 = call();
    649     emitGetJITStubArg(1, regT2);
    650     emitGetJITStubArg(3, regT1);
    651     restoreReturnAddressBeforeReturn(regT3);
    652     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3); // reload the function body node, so we can reload the code pointer.
    653     hasCodeBlock3.link(this);
    654    
    655     Jump isNativeFunc3 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));
    656 
    657     // Check argCount matches callee arity.
    658     Jump arityCheckOkay3 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
    659     preserveReturnAddressAfterCall(regT3);
    660     emitPutJITStubArg(regT3, 2);
    661     emitPutJITStubArg(regT0, 4);
    662     restoreArgumentReference();
    663     Call callArityCheck3 = call();
    664     move(regT1, callFrameRegister);
    665     emitGetJITStubArg(1, regT2);
    666     emitGetJITStubArg(3, regT1);
    667     restoreReturnAddressBeforeReturn(regT3);
    668     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3); // reload the function body node, so we can reload the code pointer.
    669     arityCheckOkay3.link(this);
    670     isNativeFunc3.link(this);
    671 
    672     // load ctiCode from the new codeBlock.
    673     loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_jitCode)), regT0);
    674    
    675     compileOpCallInitializeCallFrame();
    676     jump(regT0);
    677 
    678    
    679     Label nativeCallThunk = align();
    680     preserveReturnAddressAfterCall(regT0);
    681     emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address
    682 
    683     // Load caller frame's scope chain into this callframe so that whatever we call can
    684     // get to its global data.
    685     emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    686     emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    687     emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
    688    
    689 
    690 #if PLATFORM(X86_64)
    691     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, X86::ecx);
    692 
    693     // Allocate stack space for our arglist
    694     subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
    695     COMPILE_ASSERT((sizeof(ArgList) & 0xf) == 0, ArgList_should_by_16byte_aligned);
    696    
    697     // Set up arguments
    698     subPtr(Imm32(1), X86::ecx); // Don't include 'this' in argcount
    699 
    700     // Push argcount
    701     storePtr(X86::ecx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));
    702 
    703     // Calculate the start of the callframe header, and store in edx
    704     addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), callFrameRegister, X86::edx);
    705    
    706     // Calculate start of arguments as callframe header - sizeof(Register) * argcount (ecx)
    707     mul32(Imm32(sizeof(Register)), X86::ecx, X86::ecx);
    708     subPtr(X86::ecx, X86::edx);
    709 
    710     // push pointer to arguments
    711     storePtr(X86::edx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
    712    
    713     // ArgList is passed by reference so is stackPointerRegister
    714     move(stackPointerRegister, X86::ecx);
    715    
    716     // edx currently points to the first argument, edx-sizeof(Register) points to 'this'
    717     loadPtr(Address(X86::edx, -(int32_t)sizeof(Register)), X86::edx);
    718    
    719     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::esi);
    720 
    721     move(callFrameRegister, X86::edi);
    722 
    723     call(Address(X86::esi, OBJECT_OFFSETOF(JSFunction, m_data)));
    724    
    725     addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
    726 #elif PLATFORM(X86)
    727     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    728 
    729     /* We have two structs that we use to describe the stackframe we set up for our
    730      * call to native code.  NativeCallFrameStructure describes how we set up the stack
    731      * in advance of the call.  NativeFunctionCalleeSignature describes the callframe
    732      * as the native code expects it.  We do this as we are using the fastcall calling
    733      * convention which results in the callee popping its arguments off the stack, but
    734      * not the rest of the callframe so we need a nice way to ensure we increment the
    735      * stack pointer by the right amount after the call.
    736      */
    737 #if COMPILER(MSVC) || PLATFORM(LINUX)
    738     struct NativeCallFrameStructure {
    739       //  CallFrame* callFrame; // passed in EDX
    740         JSObject* callee;
    741         JSValue thisValue;
    742         ArgList* argPointer;
    743         ArgList args;
    744         JSValue result;
    745     };
    746     struct NativeFunctionCalleeSignature {
    747         JSObject* callee;
    748         JSValue thisValue;
    749         ArgList* argPointer;
    750     };
    751 #else
    752     struct NativeCallFrameStructure {
    753       //  CallFrame* callFrame; // passed in ECX
    754       //  JSObject* callee; // passed in EDX
    755         JSValue thisValue;
    756         ArgList* argPointer;
    757         ArgList args;
    758     };
    759     struct NativeFunctionCalleeSignature {
    760         JSValue thisValue;
    761         ArgList* argPointer;
    762     };
    763 #endif
    764     const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    765     // Allocate system stack frame
    766     subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);
    767 
    768     // Set up arguments
    769     subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
    770 
    771     // push argcount
    772     storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));
    773    
    774     // Calculate the start of the callframe header, and store in regT1
    775     addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);
    776    
    777     // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    778     mul32(Imm32(sizeof(Register)), regT0, regT0);
    779     subPtr(regT0, regT1);
    780     storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));
    781 
    782     // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    783     addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    784     storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));
    785 
    786     // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    787     loadPtr(Address(regT1, -(int)sizeof(Register)), regT1);
    788     storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue)));
    789 
    790 #if COMPILER(MSVC) || PLATFORM(LINUX)
    791     // Compute a pointer to NativeCallFrameStructure::result, passed in ecx
    792     addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86::ecx);
    793 
    794     // Plant callee
    795     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::eax);
    796     storePtr(X86::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));
    797 
    798     // Plant callframe
    799     move(callFrameRegister, X86::edx);
    800 
    801     call(Address(X86::eax, OBJECT_OFFSETOF(JSFunction, m_data)));
    802 
    803     // JSValue is a non-POD type
    804     loadPtr(Address(X86::eax), X86::eax);
    805 #else
    806     // Plant callee
    807     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::edx);
    808 
    809     // Plant callframe
    810     move(callFrameRegister, X86::ecx);
    811     call(Address(X86::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
    812 #endif
    813 
    814     // We've put a few temporaries on the stack in addition to the actual arguments
    815     // so pull them off now
    816     addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
    817 
    818 #elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
    819 #error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
    820 #else
    821     breakpoint();
    822 #endif
    823 
    824     // Check for an exception
    825     loadPtr(&(globalData->exception), regT2);
    826     Jump exceptionHandler = branchTestPtr(NonZero, regT2);
    827 
    828     // Grab the return address.
    829     emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    830    
    831     // Restore our caller's "r".
    832     emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    833    
    834     // Return.
    835     restoreReturnAddressBeforeReturn(regT1);
    836     ret();
    837 
    838     // Handle an exception
    839     exceptionHandler.link(this);
    840     // Grab the return address.
    841     emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    842     move(ImmPtr(&globalData->exceptionLocation), regT2);
    843     storePtr(regT1, regT2);
    844     move(ImmPtr(reinterpret_cast<void*>(ctiVMThrowTrampoline)), regT2);
    845     emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    846     poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    847     restoreReturnAddressBeforeReturn(regT2);
    848     ret();
    849    
    850 
    851 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    852     Call array_failureCases1Call = makeTailRecursiveCall(array_failureCases1);
    853     Call array_failureCases2Call = makeTailRecursiveCall(array_failureCases2);
    854     Call array_failureCases3Call = makeTailRecursiveCall(array_failureCases3);
    855     Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    856     Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    857     Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
    858 #endif
    859 
    860     // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    861     LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));
    862 
    863 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    864     patchBuffer.link(array_failureCases1Call, FunctionPtr(JITStubs::cti_op_get_by_id_array_fail));
    865     patchBuffer.link(array_failureCases2Call, FunctionPtr(JITStubs::cti_op_get_by_id_array_fail));
    866     patchBuffer.link(array_failureCases3Call, FunctionPtr(JITStubs::cti_op_get_by_id_array_fail));
    867     patchBuffer.link(string_failureCases1Call, FunctionPtr(JITStubs::cti_op_get_by_id_string_fail));
    868     patchBuffer.link(string_failureCases2Call, FunctionPtr(JITStubs::cti_op_get_by_id_string_fail));
    869     patchBuffer.link(string_failureCases3Call, FunctionPtr(JITStubs::cti_op_get_by_id_string_fail));
    870 #endif
    871     patchBuffer.link(callArityCheck1, FunctionPtr(JITStubs::cti_op_call_arityCheck));
    872     patchBuffer.link(callArityCheck2, FunctionPtr(JITStubs::cti_op_call_arityCheck));
    873     patchBuffer.link(callArityCheck3, FunctionPtr(JITStubs::cti_op_call_arityCheck));
    874     patchBuffer.link(callJSFunction1, FunctionPtr(JITStubs::cti_op_call_JSFunction));
    875     patchBuffer.link(callJSFunction2, FunctionPtr(JITStubs::cti_op_call_JSFunction));
    876     patchBuffer.link(callJSFunction3, FunctionPtr(JITStubs::cti_op_call_JSFunction));
    877     patchBuffer.link(callDontLazyLinkCall, FunctionPtr(JITStubs::cti_vm_dontLazyLinkCall));
    878     patchBuffer.link(callLazyLinkCall, FunctionPtr(JITStubs::cti_vm_lazyLinkCall));
    879 
    880     CodeRef finalCode = patchBuffer.finalizeCode();
    881     *executablePool = finalCode.m_executablePool;
    882 
    883     *ctiVirtualCallPreLink = trampolineAt(finalCode, virtualCallPreLinkBegin);
    884     *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
    885     *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    886     *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
    887 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    888     *ctiArrayLengthTrampoline = trampolineAt(finalCode, arrayLengthBegin);
    889     *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
    890 #else
    891     UNUSED_PARAM(ctiArrayLengthTrampoline);
    892     UNUSED_PARAM(ctiStringLengthTrampoline);
    893 #endif
    894 }
    895 
     558#if !USE(JSVALUE32_64)
    896559void JIT::emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst)
    897560{
     
    907570    storePtr(src, Address(variableObject, index * sizeof(Register)));
    908571}
    909 
     572#endif
     573
     574#if ENABLE(JIT_OPTIMIZE_CALL)
    910575void JIT::unlinkCall(CallLinkInfo* callLinkInfo)
    911576{
     
    914579    // match).  Reset the check so it no longer matches.
    915580    RepatchBuffer repatchBuffer(callLinkInfo->ownerCodeBlock);
     581#if USE(JSVALUE32_64)
     582    repatchBuffer.repatch(callLinkInfo->hotPathBegin, 0);
     583#else
    916584    repatchBuffer.repatch(callLinkInfo->hotPathBegin, JSValue::encode(JSValue()));
     585#endif
    917586}
    918587
     
    937606    repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs.ctiVirtualCall());
    938607}
     608#endif // ENABLE(JIT_OPTIMIZE_CALL)
    939609
    940610} // namespace JSC
  • trunk/JavaScriptCore/jit/JIT.h

    r46528 r46598  
    2929#include <wtf/Platform.h>
    3030
    31 // OBJECT_OFFSETOF: Like the C++ offsetof macro, but you can use it with classes.
    32 // The magic number 0x4000 is insignificant. We use it to avoid using NULL, since
    33 // NULL can cause compiler problems, especially in cases of multiple inheritance.
    34 #define OBJECT_OFFSETOF(class, field) (reinterpret_cast<ptrdiff_t>(&(reinterpret_cast<class*>(0x4000)->field)) - 0x4000)
    35 
    3631#if ENABLE(JIT)
    3732
     
    178173    class JIT : private MacroAssembler {
    179174        friend class JITStubCall;
    180         friend class CallEvalJITStub;
    181175
    182176        using MacroAssembler::Jump;
     
    257251        // will compress the displacement, and we may not be able to fit a patched offset.
    258252        static const int patchGetByIdDefaultOffset = 256;
     253
     254    public:
     255        static void compile(JSGlobalData* globalData, CodeBlock* codeBlock)
     256        {
     257            JIT jit(globalData, codeBlock);
     258            jit.privateCompile();
     259        }
     260
     261        static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress)
     262        {
     263            JIT jit(globalData, codeBlock);
     264            jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, cachedOffset, returnAddress, callFrame);
     265        }
     266
     267        static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
     268        {
     269            JIT jit(globalData, codeBlock);
     270            jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, cachedOffset);
     271        }
     272        static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset)
     273        {
     274            JIT jit(globalData, codeBlock);
     275            jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, cachedOffset, callFrame);
     276        }
     277        static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset)
     278        {
     279            JIT jit(globalData, codeBlock);
     280            jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, cachedOffset, callFrame);
     281        }
     282
     283        static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress)
     284        {
     285            JIT jit(globalData, codeBlock);
     286            jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, cachedOffset, returnAddress, callFrame);
     287        }
     288       
     289        static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress)
     290        {
     291            JIT jit(globalData, codeBlock);
     292            jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress);
     293        }
     294
     295        static void compileCTIMachineTrampolines(JSGlobalData* globalData, RefPtr<ExecutablePool>* executablePool, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallPreLink, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
     296        {
     297            JIT jit(globalData);
     298            jit.privateCompileCTIMachineTrampolines(executablePool, globalData, ctiStringLengthTrampoline, ctiVirtualCallPreLink, ctiVirtualCallLink, ctiVirtualCall, ctiNativeCallThunk);
     299        }
     300
     301        static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
     302        static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
     303        static void patchMethodCallProto(CodeBlock* codeblock, MethodCallLinkInfo&, JSFunction*, Structure*, JSObject*);
     304
     305        static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
     306        {
     307            JIT jit(globalData, codeBlock);
     308            return jit.privateCompilePatchGetArrayLength(returnAddress);
     309        }
     310
     311        static void linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JITCode&, CallLinkInfo*, int callerArgCount, JSGlobalData*);
     312        static void unlinkCall(CallLinkInfo*);
     313
     314    private:
     315        struct JSRInfo {
     316            DataLabelPtr storeLocation;
     317            Label target;
     318
     319            JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
     320                : storeLocation(storeLocation)
     321                , target(targetLocation)
     322            {
     323            }
     324        };
     325
     326        JIT(JSGlobalData*, CodeBlock* = 0);
     327
     328        void privateCompileMainPass();
     329        void privateCompileLinkPass();
     330        void privateCompileSlowCases();
     331        void privateCompile();
     332        void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
     333        void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, size_t cachedOffset);
     334        void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame);
     335        void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame);
     336        void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
     337        void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress);
     338
     339        void privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* data, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallPreLink, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk);
     340        void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
     341
     342        void addSlowCase(Jump);
     343        void addSlowCase(JumpList);
     344        void addJump(Jump, int);
     345        void emitJumpSlowToHot(Jump, int);
     346
     347        void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
     348        void compileOpCallVarargs(Instruction* instruction);
     349        void compileOpCallInitializeCallFrame();
     350        void compileOpCallSetupArgs(Instruction*);
     351        void compileOpCallVarargsSetupArgs(Instruction*);
     352        void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
     353        void compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter);
     354        void compileOpConstructSetupArgs(Instruction*);
     355
     356        enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
     357        void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
     358
     359#if USE(JSVALUE32_64)
     360        Address tagFor(unsigned index, RegisterID base = callFrameRegister);
     361        Address payloadFor(unsigned index, RegisterID base = callFrameRegister);
     362        Address addressFor(unsigned index, RegisterID base = callFrameRegister);
     363
     364        bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);
     365        bool isOperandConstantImmediateDouble(unsigned src);
     366
     367        void emitLoadTag(unsigned index, RegisterID tag);
     368        void emitLoadPayload(unsigned index, RegisterID payload);
     369
     370        void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
     371        void emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
     372        void emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2);
     373        void emitLoadDouble(unsigned index, FPRegisterID value);
     374        void emitLoadInt32ToDouble(unsigned index, FPRegisterID value);
     375
     376        void emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
     377        void emitStore(unsigned index, const JSValue constant, RegisterID base = callFrameRegister);
     378        void emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32 = false);
     379        void emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32 = false);
     380        void emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell = false);
     381        void emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool = false);
     382        void emitStoreDouble(unsigned index, FPRegisterID value);
     383
     384        bool isLabeled(unsigned bytecodeIndex);
     385        void map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload);
     386        void unmap(RegisterID);
     387        void unmap();
     388        bool isMapped(unsigned virtualRegisterIndex);
     389        bool getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload);
     390        bool getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag);
     391
     392        void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex);
     393        void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag);
     394        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, unsigned virtualRegisterIndex);
     395
     396#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
     397        void compileGetByIdHotPath();
     398        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
     399#endif
     400        void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, Structure* structure, size_t cachedOffset);
     401        void compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
     402        void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, Structure* structure, size_t cachedOffset);
     403
     404        // Arithmetic opcode helpers
     405        void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
     406        void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
     407        void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
     408
     409#if PLATFORM(X86)
     410        // These architecture specific value are used to enable patching - see comment on op_put_by_id.
     411        static const int patchOffsetPutByIdStructure = 7;
     412        static const int patchOffsetPutByIdExternalLoad = 13;
     413        static const int patchLengthPutByIdExternalLoad = 3;
     414        static const int patchOffsetPutByIdPropertyMapOffset1 = 22;
     415        static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
     416        // These architecture specific value are used to enable patching - see comment on op_get_by_id.
     417        static const int patchOffsetGetByIdStructure = 7;
     418        static const int patchOffsetGetByIdBranchToSlowCase = 13;
     419        static const int patchOffsetGetByIdExternalLoad = 13;
     420        static const int patchLengthGetByIdExternalLoad = 3;
     421        static const int patchOffsetGetByIdPropertyMapOffset1 = 22;
     422        static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
     423        static const int patchOffsetGetByIdPutResult = 28;
     424#if ENABLE(OPCODE_SAMPLING) && USE(JIT_STUB_ARGUMENT_VA_LIST)
     425        static const int patchOffsetGetByIdSlowCaseCall = 35;
     426#elif ENABLE(OPCODE_SAMPLING)
     427        static const int patchOffsetGetByIdSlowCaseCall = 37;
     428#elif USE(JIT_STUB_ARGUMENT_VA_LIST)
     429        static const int patchOffsetGetByIdSlowCaseCall = 25;
     430#else
     431        static const int patchOffsetGetByIdSlowCaseCall = 27;
     432#endif
     433        static const int patchOffsetOpCallCompareToJump = 6;
     434
     435        static const int patchOffsetMethodCheckProtoObj = 11;
     436        static const int patchOffsetMethodCheckProtoStruct = 18;
     437        static const int patchOffsetMethodCheckPutFunction = 29;
     438#else
     439#error "JSVALUE32_64 not supported on this platform."
     440#endif
     441
     442#else // USE(JSVALUE32_64)
     443        void emitGetVirtualRegister(int src, RegisterID dst);
     444        void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
     445        void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
     446
     447        int32_t getConstantOperandImmediateInt(unsigned src);
     448
     449        void emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst);
     450        void emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index);
     451       
     452        void killLastResultRegister();
     453
     454        Jump emitJumpIfJSCell(RegisterID);
     455        Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
     456        void emitJumpSlowCaseIfJSCell(RegisterID);
     457        Jump emitJumpIfNotJSCell(RegisterID);
     458        void emitJumpSlowCaseIfNotJSCell(RegisterID);
     459        void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
     460#if USE(JSVALUE64)
     461        JIT::Jump emitJumpIfImmediateNumber(RegisterID);
     462        JIT::Jump emitJumpIfNotImmediateNumber(RegisterID);
     463#else
     464        JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
     465        {
     466            return emitJumpIfImmediateInteger(reg);
     467        }
     468       
     469        JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
     470        {
     471            return emitJumpIfNotImmediateInteger(reg);
     472        }
     473#endif
     474        JIT::Jump emitJumpIfImmediateInteger(RegisterID);
     475        JIT::Jump emitJumpIfNotImmediateInteger(RegisterID);
     476        JIT::Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
     477        void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
     478        void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
     479
     480#if !USE(JSVALUE64)
     481        void emitFastArithDeTagImmediate(RegisterID);
     482        Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
     483#endif
     484        void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
     485        void emitFastArithImmToInt(RegisterID);
     486        void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);
     487
     488        void emitTagAsBoolImmediate(RegisterID reg);
     489        void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
     490        void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
     491
     492#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
     493        void compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned propertyAccessInstructionIndex);
     494        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
     495#endif
     496        void compileGetDirectOffset(RegisterID base, RegisterID result, Structure* structure, size_t cachedOffset);
     497        void compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID result, size_t cachedOffset);
     498        void compilePutDirectOffset(RegisterID base, RegisterID value, Structure* structure, size_t cachedOffset);
    259499
    260500#if PLATFORM(X86_64)
     
    332572        static const int patchOffsetMethodCheckPutFunction = 46;
    333573#endif
    334 
    335     public:
    336         static void compile(JSGlobalData* globalData, CodeBlock* codeBlock)
    337         {
    338             JIT jit(globalData, codeBlock);
    339             jit.privateCompile();
    340         }
    341 
    342         static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress)
    343         {
    344             JIT jit(globalData, codeBlock);
    345             jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, cachedOffset, returnAddress, callFrame);
    346         }
    347 
    348         static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
    349         {
    350             JIT jit(globalData, codeBlock);
    351             jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, cachedOffset);
    352         }
    353         static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset)
    354         {
    355             JIT jit(globalData, codeBlock);
    356             jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, cachedOffset, callFrame);
    357         }
    358         static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset)
    359         {
    360             JIT jit(globalData, codeBlock);
    361             jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, cachedOffset, callFrame);
    362         }
    363 
    364         static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress)
    365         {
    366             JIT jit(globalData, codeBlock);
    367             jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, cachedOffset, returnAddress, callFrame);
    368         }
    369        
    370         static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress)
    371         {
    372             JIT jit(globalData, codeBlock);
    373             jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress);
    374         }
    375 
    376         static void compileCTIMachineTrampolines(JSGlobalData* globalData, RefPtr<ExecutablePool>* executablePool, CodePtr* ctiArrayLengthTrampoline, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallPreLink, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
    377         {
    378             JIT jit(globalData);
    379             jit.privateCompileCTIMachineTrampolines(executablePool, globalData, ctiArrayLengthTrampoline, ctiStringLengthTrampoline, ctiVirtualCallPreLink, ctiVirtualCallLink, ctiVirtualCall, ctiNativeCallThunk);
    380         }
    381 
    382         static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
    383         static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
    384         static void patchMethodCallProto(CodeBlock* codeblock, MethodCallLinkInfo&, JSFunction*, Structure*, JSObject*);
    385 
    386         static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
    387         {
    388             JIT jit(globalData, codeBlock);
    389             return jit.privateCompilePatchGetArrayLength(returnAddress);
    390         }
    391 
    392         static void linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JITCode&, CallLinkInfo*, int callerArgCount, JSGlobalData*);
    393         static void unlinkCall(CallLinkInfo*);
    394 
    395     private:
    396         struct JSRInfo {
    397             DataLabelPtr storeLocation;
    398             Label target;
    399 
    400             JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
    401                 : storeLocation(storeLocation)
    402                 , target(targetLocation)
    403             {
    404             }
    405         };
    406 
    407         JIT(JSGlobalData*, CodeBlock* = 0);
    408 
    409         void privateCompileMainPass();
    410         void privateCompileLinkPass();
    411         void privateCompileSlowCases();
    412         void privateCompile();
    413         void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
    414         void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, size_t cachedOffset);
    415         void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame);
    416         void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame);
    417         void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
    418         void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress);
    419 
    420         void privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* data, CodePtr* ctiArrayLengthTrampoline, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallPreLink, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk);
    421         void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
    422 
    423         void addSlowCase(Jump);
    424         void addJump(Jump, int);
    425         void emitJumpSlowToHot(Jump, int);
    426 
    427 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    428         void compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned propertyAccessInstructionIndex);
    429         void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex, bool isMethodCheck = false);
    430 #endif
    431         void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
    432         void compileOpCallVarargs(Instruction* instruction);
    433         void compileOpCallInitializeCallFrame();
    434         void compileOpCallSetupArgs(Instruction*);
    435         void compileOpCallVarargsSetupArgs(Instruction*);
    436         void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
    437         void compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter);
    438         void compileOpConstructSetupArgs(Instruction*);
    439         enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
    440         void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
    441 
    442         void compileGetDirectOffset(RegisterID base, RegisterID result, Structure* structure, size_t cachedOffset);
    443         void compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID result, size_t cachedOffset);
    444         void compilePutDirectOffset(RegisterID base, RegisterID value, Structure* structure, size_t cachedOffset);
    445 
    446         // Arithmetic Ops
     574#endif // USE(JSVALUE32_64)
    447575
    448576        void emit_op_add(Instruction*);
    449         void emit_op_sub(Instruction*);
    450         void emit_op_mul(Instruction*);
    451         void emit_op_mod(Instruction*);
    452577        void emit_op_bitand(Instruction*);
    453         void emit_op_lshift(Instruction*);
    454         void emit_op_rshift(Instruction*);
     578        void emit_op_bitnot(Instruction*);
     579        void emit_op_bitor(Instruction*);
     580        void emit_op_bitxor(Instruction*);
     581        void emit_op_call(Instruction*);
     582        void emit_op_call_eval(Instruction*);
     583        void emit_op_call_varargs(Instruction*);
     584        void emit_op_catch(Instruction*);
     585        void emit_op_construct(Instruction*);
     586        void emit_op_construct_verify(Instruction*);
     587        void emit_op_convert_this(Instruction*);
     588        void emit_op_create_arguments(Instruction*);
     589        void emit_op_debug(Instruction*);
     590        void emit_op_del_by_id(Instruction*);
     591        void emit_op_div(Instruction*);
     592        void emit_op_end(Instruction*);
     593        void emit_op_enter(Instruction*);
     594        void emit_op_enter_with_activation(Instruction*);
     595        void emit_op_eq(Instruction*);
     596        void emit_op_eq_null(Instruction*);
     597        void emit_op_get_by_id(Instruction*);
     598        void emit_op_get_by_val(Instruction*);
     599        void emit_op_get_global_var(Instruction*);
     600        void emit_op_get_scoped_var(Instruction*);
     601        void emit_op_init_arguments(Instruction*);
     602        void emit_op_instanceof(Instruction*);
     603        void emit_op_jeq_null(Instruction*);
     604        void emit_op_jfalse(Instruction*);
     605        void emit_op_jmp(Instruction*);
     606        void emit_op_jmp_scopes(Instruction*);
     607        void emit_op_jneq_null(Instruction*);
     608        void emit_op_jneq_ptr(Instruction*);
    455609        void emit_op_jnless(Instruction*);
    456610        void emit_op_jnlesseq(Instruction*);
    457         void emit_op_pre_inc(Instruction*);
    458         void emit_op_pre_dec(Instruction*);
    459         void emit_op_post_inc(Instruction*);
    460         void emit_op_post_dec(Instruction*);
    461         void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
    462         void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
    463         void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
    464         void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
    465         void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
    466         void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
    467         void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
    468         void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
    469         void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    470         void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
    471         void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
    472         void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
    473         void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
    474 
    475         void emit_op_get_by_val(Instruction*);
    476         void emit_op_put_by_val(Instruction*);
    477         void emit_op_put_by_index(Instruction*);
    478         void emit_op_put_getter(Instruction*);
    479         void emit_op_put_setter(Instruction*);
    480         void emit_op_del_by_id(Instruction*);
    481 
    482         void emit_op_mov(Instruction*);
    483         void emit_op_end(Instruction*);
    484         void emit_op_jmp(Instruction*);
     611        void emit_op_jsr(Instruction*);
     612        void emit_op_jtrue(Instruction*);
     613        void emit_op_load_varargs(Instruction*);
    485614        void emit_op_loop(Instruction*);
    486615        void emit_op_loop_if_less(Instruction*);
    487616        void emit_op_loop_if_lesseq(Instruction*);
     617        void emit_op_loop_if_true(Instruction*);
     618        void emit_op_lshift(Instruction*);
     619        void emit_op_method_check(Instruction*);
     620        void emit_op_mod(Instruction*);
     621        void emit_op_mov(Instruction*);
     622        void emit_op_mul(Instruction*);
     623        void emit_op_negate(Instruction*);
     624        void emit_op_neq(Instruction*);
     625        void emit_op_neq_null(Instruction*);
     626        void emit_op_new_array(Instruction*);
     627        void emit_op_new_error(Instruction*);
     628        void emit_op_new_func(Instruction*);
     629        void emit_op_new_func_exp(Instruction*);
    488630        void emit_op_new_object(Instruction*);
     631        void emit_op_new_regexp(Instruction*);
     632        void emit_op_next_pname(Instruction*);
     633        void emit_op_not(Instruction*);
     634        void emit_op_nstricteq(Instruction*);
     635        void emit_op_pop_scope(Instruction*);
     636        void emit_op_post_dec(Instruction*);
     637        void emit_op_post_inc(Instruction*);
     638        void emit_op_pre_dec(Instruction*);
     639        void emit_op_pre_inc(Instruction*);
     640        void emit_op_profile_did_call(Instruction*);
     641        void emit_op_profile_will_call(Instruction*);
     642        void emit_op_push_new_scope(Instruction*);
     643        void emit_op_push_scope(Instruction*);
    489644        void emit_op_put_by_id(Instruction*);
    490         void emit_op_get_by_id(Instruction*);
    491         void emit_op_instanceof(Instruction*);
    492         void emit_op_new_func(Instruction*);
    493         void emit_op_call(Instruction*);
    494         void emit_op_call_eval(Instruction*);
    495         void emit_op_method_check(Instruction*);
    496         void emit_op_load_varargs(Instruction*);
    497         void emit_op_call_varargs(Instruction*);
    498         void emit_op_construct(Instruction*);
    499         void emit_op_get_global_var(Instruction*);
     645        void emit_op_put_by_index(Instruction*);
     646        void emit_op_put_by_val(Instruction*);
     647        void emit_op_put_getter(Instruction*);
    500648        void emit_op_put_global_var(Instruction*);
    501         void emit_op_get_scoped_var(Instruction*);
    502649        void emit_op_put_scoped_var(Instruction*);
     650        void emit_op_put_setter(Instruction*);
     651        void emit_op_resolve(Instruction*);
     652        void emit_op_resolve_base(Instruction*);
     653        void emit_op_resolve_global(Instruction*);
     654        void emit_op_resolve_skip(Instruction*);
     655        void emit_op_resolve_with_base(Instruction*);
     656        void emit_op_ret(Instruction*);
     657        void emit_op_rshift(Instruction*);
     658        void emit_op_sret(Instruction*);
     659        void emit_op_strcat(Instruction*);
     660        void emit_op_stricteq(Instruction*);
     661        void emit_op_sub(Instruction*);
     662        void emit_op_switch_char(Instruction*);
     663        void emit_op_switch_imm(Instruction*);
     664        void emit_op_switch_string(Instruction*);
    503665        void emit_op_tear_off_activation(Instruction*);
    504666        void emit_op_tear_off_arguments(Instruction*);
    505         void emit_op_ret(Instruction*);
    506         void emit_op_new_array(Instruction*);
    507         void emit_op_resolve(Instruction*);
    508         void emit_op_construct_verify(Instruction*);
     667        void emit_op_throw(Instruction*);
     668        void emit_op_to_jsnumber(Instruction*);
    509669        void emit_op_to_primitive(Instruction*);
    510         void emit_op_strcat(Instruction*);
    511         void emit_op_resolve_func(Instruction*);
    512         void emit_op_loop_if_true(Instruction*);
    513         void emit_op_resolve_base(Instruction*);
    514         void emit_op_resolve_skip(Instruction*);
    515         void emit_op_resolve_global(Instruction*);
    516         void emit_op_not(Instruction*);
    517         void emit_op_jfalse(Instruction*);
    518         void emit_op_jeq_null(Instruction*);
    519         void emit_op_jneq_null(Instruction*);
    520         void emit_op_jneq_ptr(Instruction*);
    521670        void emit_op_unexpected_load(Instruction*);
    522         void emit_op_jsr(Instruction*);
    523         void emit_op_sret(Instruction*);
    524         void emit_op_eq(Instruction*);
    525         void emit_op_bitnot(Instruction*);
    526         void emit_op_resolve_with_base(Instruction*);
    527         void emit_op_new_func_exp(Instruction*);
    528         void emit_op_jtrue(Instruction*);
    529         void emit_op_neq(Instruction*);
    530         void emit_op_bitxor(Instruction*);
    531         void emit_op_new_regexp(Instruction*);
    532         void emit_op_bitor(Instruction*);
    533         void emit_op_throw(Instruction*);
    534         void emit_op_next_pname(Instruction*);
    535         void emit_op_push_scope(Instruction*);
    536         void emit_op_pop_scope(Instruction*);
    537         void emit_op_stricteq(Instruction*);
    538         void emit_op_nstricteq(Instruction*);
    539         void emit_op_to_jsnumber(Instruction*);
    540         void emit_op_push_new_scope(Instruction*);
    541         void emit_op_catch(Instruction*);
    542         void emit_op_jmp_scopes(Instruction*);
    543         void emit_op_switch_imm(Instruction*);
    544         void emit_op_switch_char(Instruction*);
    545         void emit_op_switch_string(Instruction*);
    546         void emit_op_new_error(Instruction*);
    547         void emit_op_debug(Instruction*);
    548         void emit_op_eq_null(Instruction*);
    549         void emit_op_neq_null(Instruction*);
    550         void emit_op_enter(Instruction*);
    551         void emit_op_enter_with_activation(Instruction*);
    552         void emit_op_init_arguments(Instruction*);
    553         void emit_op_create_arguments(Instruction*);
    554         void emit_op_convert_this(Instruction*);
    555         void emit_op_profile_will_call(Instruction*);
    556         void emit_op_profile_did_call(Instruction*);
    557 
    558         void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
    559         void emitSlow_op_construct_verify(Instruction*, Vector<SlowCaseEntry>::iterator&);
    560         void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
    561         void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
    562         void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
    563         void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
    564         void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
    565         void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    566         void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
    567         void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
    568         void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
    569         void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
     671
     672        void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
     673        void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
    570674        void emitSlow_op_bitnot(Instruction*, Vector<SlowCaseEntry>::iterator&);
    571         void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
     675        void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
    572676        void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
    573         void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
    574         void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    575         void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    576         void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    577         void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    578         void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
    579677        void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
    580678        void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
    581         void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
    582679        void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
    583680        void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
     681        void emitSlow_op_construct_verify(Instruction*, Vector<SlowCaseEntry>::iterator&);
     682        void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
     683        void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
     684        void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
     685        void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
     686        void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
     687        void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
     688        void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
     689        void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
     690        void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
     691        void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
     692        void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
     693        void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
     694        void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
     695        void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
     696        void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
     697        void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
     698        void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
     699        void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
     700        void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
     701        void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
     702        void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
     703        void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
     704        void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
     705        void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
     706        void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
     707        void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
     708        void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
     709        void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
     710        void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
     711        void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
     712        void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
    584713        void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
    585 
    586 #if ENABLE(JIT_OPTIMIZE_ARITHMETIC)
    587         void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
    588         void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
    589 #endif
    590 
    591         void emitGetVirtualRegister(int src, RegisterID dst);
    592         void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
    593         void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
    594 
     714        void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
     715
     716        /* These functions are deprecated: Please use JITStubCall instead. */
    595717        void emitPutJITStubArg(RegisterID src, unsigned argumentNumber);
     718#if USE(JSVALUE32_64)
     719        void emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2);
     720#else
    596721        void emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch);
     722#endif
    597723        void emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber);
    598724        void emitPutJITStubArgConstant(void* value, unsigned argumentNumber);
     
    607733
    608734        JSValue getConstantOperand(unsigned src);
    609         int32_t getConstantOperandImmediateInt(unsigned src);
    610735        bool isOperandConstantImmediateInt(unsigned src);
    611 
    612         Jump emitJumpIfJSCell(RegisterID);
    613         Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
    614         void emitJumpSlowCaseIfJSCell(RegisterID);
    615         Jump emitJumpIfNotJSCell(RegisterID);
    616         void emitJumpSlowCaseIfNotJSCell(RegisterID);
    617         void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
    618 #if USE(ALTERNATE_JSIMMEDIATE)
    619         JIT::Jump emitJumpIfImmediateNumber(RegisterID);
    620         JIT::Jump emitJumpIfNotImmediateNumber(RegisterID);
    621 #else
    622         JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
    623         {
    624             return emitJumpIfImmediateInteger(reg);
    625         }
    626        
    627         JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
    628         {
    629             return emitJumpIfNotImmediateInteger(reg);
    630         }
    631 #endif
    632736
    633737        Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
     
    642746        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int vReg);
    643747
    644         JIT::Jump emitJumpIfImmediateInteger(RegisterID);
    645         JIT::Jump emitJumpIfNotImmediateInteger(RegisterID);
    646         JIT::Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
    647         void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
    648         void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
    649 
    650748        Jump checkStructure(RegisterID reg, Structure* structure);
    651 
    652 #if !USE(ALTERNATE_JSIMMEDIATE)
    653         void emitFastArithDeTagImmediate(RegisterID);
    654         Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
    655 #endif
    656         void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
    657         void emitFastArithImmToInt(RegisterID);
    658         void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);
    659 
    660         void emitTagAsBoolImmediate(RegisterID reg);
    661749
    662750        void restoreArgumentReference();
     
    664752
    665753        Call emitNakedCall(CodePtr function = CodePtr());
     754
    666755        void preserveReturnAddressAfterCall(RegisterID);
    667756        void restoreReturnAddressBeforeReturn(RegisterID);
    668757        void restoreReturnAddressBeforeReturn(Address);
    669758
    670         void emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst);
    671         void emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index);
    672        
    673759        void emitTimeoutCheck();
    674760#ifndef NDEBUG
    675761        void printBytecodeOperandTypes(unsigned src1, unsigned src2);
    676762#endif
    677 
    678         void killLastResultRegister();
    679 
    680763
    681764#if ENABLE(SAMPLING_FLAGS)
     
    714797        Vector<SwitchRecord> m_switches;
    715798
    716         int m_lastResultBytecodeRegister;
    717         unsigned m_jumpTargetsPosition;
    718 
    719799        unsigned m_propertyAccessInstructionIndex;
    720800        unsigned m_globalResolveInfoIndex;
    721801        unsigned m_callLinkInfoIndex;
     802
     803#if USE(JSVALUE32_64)
     804        unsigned m_jumpTargetIndex;
     805        unsigned m_mappedBytecodeIndex;
     806        unsigned m_mappedVirtualRegisterIndex;
     807        RegisterID m_mappedTag;
     808        RegisterID m_mappedPayload;
     809#else
     810        int m_lastResultBytecodeRegister;
     811        unsigned m_jumpTargetsPosition;
     812#endif
    722813    } JIT_CLASS_ALIGNMENT;
    723 
    724 }
     814} // namespace JSC
    725815
    726816#endif // ENABLE(JIT)
  • trunk/JavaScriptCore/jit/JITArithmetic.cpp

    r44889 r46598  
    4242#endif
    4343
    44 
    4544using namespace std;
    4645
    4746namespace JSC {
     47
     48#if USE(JSVALUE32_64)
     49
// Negate (-src). Fast path: int32 negation, unless the payload is 0
// (-0 is not representable as an int32, so it bails out). If the source is
// not an int32 but is a double, negation is done by flipping the top bit of
// the tag word — in the JSVALUE32_64 encoding the tag is the high word of
// the IEEE-754 value, so bit 31 is the double's sign bit. Anything that is
// neither int32 nor double goes to the slow case.
void JIT::emit_op_negate(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0); // tag -> regT1, payload -> regT0

    Jump srcNotInt = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    addSlowCase(branch32(Equal, regT0, Imm32(0))); // negating 0 would yield -0.0, not an int32

    neg32(regT0);
    // Third argument indicates dst may already hold an int32 tag (aliases src)
    // — NOTE(review): confirm emitStoreInt32's bool semantics in JITInlineMethods.
    emitStoreInt32(dst, regT0, (dst == src));

    Jump end = jump();

    srcNotInt.link(this);
    // Tags above LowestTag are non-double values; only doubles may continue.
    addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

    xor32(Imm32(1 << 31), regT1); // flip the double's sign bit (high word)
    store32(regT1, tagFor(dst));
    if (dst != src)
        store32(regT0, payloadFor(dst)); // low word is unchanged by negation

    end.link(this);
}
     75
     76void JIT::emitSlow_op_negate(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     77{
     78    unsigned dst = currentInstruction[1].u.operand;
     79
     80    linkSlowCase(iter); // 0 check
     81    linkSlowCase(iter); // double check
     82
     83    JITStubCall stubCall(this, cti_op_negate);
     84    stubCall.addArgument(regT1, regT0);
     85    stubCall.call(dst);
     86}
     87
// jnless: jump to 'target' when !(op1 < op2). The int32 fast path compares
// payloads with the inverted condition (>=, or <= with operands swapped when
// op1 is the constant). Non-int32 operands fall through to the shared double
// path (emitBinaryDoubleOp), or become slow cases when there is no FPU.
void JIT::emit_op_jnless(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    JumpList notInt32Op1;
    JumpList notInt32Op2;

    // Int32 case: jump when the 'less' comparison fails.
    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT3, regT2);
        notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
        // op2 <= constant(op1)  <=>  !(op1 < op2)
        addJump(branch32(LessThanOrEqual, regT2, Imm32(getConstantOperand(op1).asInt32())), target + 3);
    } else if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target + 3);
    } else {
        emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
        notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, regT2), target + 3);
    }

    if (!supportsFloatingPoint()) {
        // No FPU: every non-int32 operand goes straight to the slow case.
        addSlowCase(notInt32Op1);
        addSlowCase(notInt32Op2);
        return;
    }
    Jump end = jump();

    // Double case: 'target' travels in the dst slot; emitBinaryDoubleOp
    // applies the same '+ 3' offset when it emits the jump.
    emitBinaryDoubleOp(op_jnless, target, op1, op2, OperandTypes(), notInt32Op1, notInt32Op2, !isOperandConstantImmediateInt(op1), isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2));
    end.link(this);
}
     124
// Slow path for op_jnless. The linkSlowCase sequence must replay, in order,
// exactly the slow cases the fast path registered: without an FPU, one int32
// tag check per non-constant operand; with an FPU, the type checks that
// emitBinaryDoubleOp added for operands not statically known to be numbers.
void JIT::emitSlow_op_jnless(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!supportsFloatingPoint()) {
        if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
            linkSlowCase(iter); // int32 check
        linkSlowCase(iter); // int32 check
    } else {
        if (!isOperandConstantImmediateInt(op1)) {
            linkSlowCase(iter); // double check
            linkSlowCase(iter); // int32 check
        }
        if (isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2))
            linkSlowCase(iter); // double check
    }

    // Ask the C++ stub for (op1 < op2); jnless jumps when that is false.
    JITStubCall stubCall(this, cti_op_jless);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target + 3);
}
     150
// jnlesseq: jump to 'target' when !(op1 <= op2). Mirrors emit_op_jnless but
// with the strict/non-strict comparison conditions swapped.
void JIT::emit_op_jnlesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    JumpList notInt32Op1;
    JumpList notInt32Op2;

    // Int32 case: jump when the 'less or equal' comparison fails.
    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT3, regT2);
        notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
        // op2 < constant(op1)  <=>  !(op1 <= op2)
        addJump(branch32(LessThan, regT2, Imm32(getConstantOperand(op1).asInt32())), target + 3);
    } else if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThan, regT0, Imm32(getConstantOperand(op2).asInt32())), target + 3);
    } else {
        emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
        notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThan, regT0, regT2), target + 3);
    }

    if (!supportsFloatingPoint()) {
        // No FPU: every non-int32 operand goes straight to the slow case.
        addSlowCase(notInt32Op1);
        addSlowCase(notInt32Op2);
        return;
    }
    Jump end = jump();

    // Double case: 'target' travels in the dst slot of emitBinaryDoubleOp.
    emitBinaryDoubleOp(op_jnlesseq, target, op1, op2, OperandTypes(), notInt32Op1, notInt32Op2, !isOperandConstantImmediateInt(op1), isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2));
    end.link(this);
}
     187
// Slow path for op_jnlesseq. Identical link ladder to emitSlow_op_jnless —
// the linkSlowCase order must match the fast path's addSlowCase order.
void JIT::emitSlow_op_jnlesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!supportsFloatingPoint()) {
        if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
            linkSlowCase(iter); // int32 check
        linkSlowCase(iter); // int32 check
    } else {
        if (!isOperandConstantImmediateInt(op1)) {
            linkSlowCase(iter); // double check
            linkSlowCase(iter); // int32 check
        }
        if (isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2))
            linkSlowCase(iter); // double check
    }

    // Ask the C++ stub for (op1 <= op2); jnlesseq jumps when that is false.
    JITStubCall stubCall(this, cti_op_jlesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target + 3);
}
     213
     214// LeftShift (<<)
     215
// LeftShift (<<): int32 fast path. When the shift amount is a compile-time
// constant only op1's tag needs checking; otherwise both operands' tags are
// verified before shifting the payload. Non-int32 operands bail to the stub.
void JIT::emit_op_lshift(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0); // tag -> regT1, payload -> regT0
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        lshift32(Imm32(getConstantOperand(op2).asInt32()), regT0);
        emitStoreInt32(dst, regT0, dst == op1);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    if (!isOperandConstantImmediateInt(op1))
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    lshift32(regT2, regT0);
    emitStoreInt32(dst, regT0, dst == op1 || dst == op2);
}
     237
     238void JIT::emitSlow_op_lshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     239{
     240    unsigned dst = currentInstruction[1].u.operand;
     241    unsigned op1 = currentInstruction[2].u.operand;
     242    unsigned op2 = currentInstruction[3].u.operand;
     243
     244    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
     245        linkSlowCase(iter); // int32 check
     246    linkSlowCase(iter); // int32 check
     247
     248    JITStubCall stubCall(this, cti_op_lshift);
     249    stubCall.addArgument(op1);
     250    stubCall.addArgument(op2);
     251    stubCall.call(dst);
     252}
     253
     254// RightShift (>>)
     255
// RightShift (>>): signed int32 shift fast path; structurally identical to
// emit_op_lshift with rshift32 substituted for lshift32.
void JIT::emit_op_rshift(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0); // tag -> regT1, payload -> regT0
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        rshift32(Imm32(getConstantOperand(op2).asInt32()), regT0);
        emitStoreInt32(dst, regT0, dst == op1);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    if (!isOperandConstantImmediateInt(op1))
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    rshift32(regT2, regT0);
    emitStoreInt32(dst, regT0, dst == op1 || dst == op2);
}
     277
     278void JIT::emitSlow_op_rshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     279{
     280    unsigned dst = currentInstruction[1].u.operand;
     281    unsigned op1 = currentInstruction[2].u.operand;
     282    unsigned op2 = currentInstruction[3].u.operand;
     283
     284    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
     285        linkSlowCase(iter); // int32 check
     286    linkSlowCase(iter); // int32 check
     287
     288    JITStubCall stubCall(this, cti_op_rshift);
     289    stubCall.addArgument(op1);
     290    stubCall.addArgument(op2);
     291    stubCall.call(dst);
     292}
     293
     294// BitAnd (&)
     295
// BitAnd (&): int32 fast path. If one operand is a constant int32, only the
// other operand's tag is checked and the mask is applied as an immediate;
// otherwise both tags are verified before and-ing the payloads.
void JIT::emit_op_bitand(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    unsigned op;
    int32_t constant;
    if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
        // 'op' is the non-constant operand; 'constant' is the other's value.
        emitLoad(op, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        and32(Imm32(constant), regT0);
        emitStoreInt32(dst, regT0, (op == dst));
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    and32(regT2, regT0);
    emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
}
     318
     319void JIT::emitSlow_op_bitand(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     320{
     321    unsigned dst = currentInstruction[1].u.operand;
     322    unsigned op1 = currentInstruction[2].u.operand;
     323    unsigned op2 = currentInstruction[3].u.operand;
     324
     325    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
     326        linkSlowCase(iter); // int32 check
     327    linkSlowCase(iter); // int32 check
     328
     329    JITStubCall stubCall(this, cti_op_bitand);
     330    stubCall.addArgument(op1);
     331    stubCall.addArgument(op2);
     332    stubCall.call(dst);
     333}
     334
     335// BitOr (|)
     336
// BitOr (|): int32 fast path; same shape as emit_op_bitand with or32.
void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    unsigned op;
    int32_t constant;
    if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
        // 'op' is the non-constant operand; 'constant' is the other's value.
        emitLoad(op, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        or32(Imm32(constant), regT0);
        emitStoreInt32(dst, regT0, (op == dst));
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    or32(regT2, regT0);
    emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
}
     359
     360void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     361{
     362    unsigned dst = currentInstruction[1].u.operand;
     363    unsigned op1 = currentInstruction[2].u.operand;
     364    unsigned op2 = currentInstruction[3].u.operand;
     365
     366    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
     367        linkSlowCase(iter); // int32 check
     368    linkSlowCase(iter); // int32 check
     369
     370    JITStubCall stubCall(this, cti_op_bitor);
     371    stubCall.addArgument(op1);
     372    stubCall.addArgument(op2);
     373    stubCall.call(dst);
     374}
     375
     376// BitXor (^)
     377
// BitXor (^): int32 fast path; same shape as emit_op_bitand with xor32.
void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    unsigned op;
    int32_t constant;
    if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
        // 'op' is the non-constant operand; 'constant' is the other's value.
        emitLoad(op, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        xor32(Imm32(constant), regT0);
        emitStoreInt32(dst, regT0, (op == dst));
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    xor32(regT2, regT0);
    emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
}
     400
     401void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     402{
     403    unsigned dst = currentInstruction[1].u.operand;
     404    unsigned op1 = currentInstruction[2].u.operand;
     405    unsigned op2 = currentInstruction[3].u.operand;
     406
     407    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
     408        linkSlowCase(iter); // int32 check
     409    linkSlowCase(iter); // int32 check
     410
     411    JITStubCall stubCall(this, cti_op_bitxor);
     412    stubCall.addArgument(op1);
     413    stubCall.addArgument(op2);
     414    stubCall.call(dst);
     415}
     416
     417// BitNot (~)
     418
// BitNot (~): int32 fast path — verify the tag, complement the payload.
void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0); // tag -> regT1, payload -> regT0
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));

    not32(regT0);
    emitStoreInt32(dst, regT0, (dst == src));
}
     430
     431void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     432{
     433    unsigned dst = currentInstruction[1].u.operand;
     434
     435    linkSlowCase(iter); // int32 check
     436
     437    JITStubCall stubCall(this, cti_op_bitnot);
     438    stubCall.addArgument(regT1, regT0);
     439    stubCall.call(dst);
     440}
     441
     442// PostInc (i++)
     443
// PostInc (i++): dst receives the old value, srcDst the incremented one.
// Fast path requires an int32 source; overflow of the increment bails out.
void JIT::emit_op_post_inc(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned srcDst = currentInstruction[2].u.operand;

    emitLoad(srcDst, regT1, regT0); // tag -> regT1, payload -> regT0
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));

    if (dst == srcDst) // x = x++ is a noop for ints.
        return;

    emitStoreInt32(dst, regT0); // store the pre-increment value

    addSlowCase(branchAdd32(Overflow, Imm32(1), regT0));
    emitStoreInt32(srcDst, regT0, true);
}
     460
     461void JIT::emitSlow_op_post_inc(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     462{
     463    unsigned dst = currentInstruction[1].u.operand;
     464    unsigned srcDst = currentInstruction[2].u.operand;
     465
     466    linkSlowCase(iter); // int32 check
     467    if (dst != srcDst)
     468        linkSlowCase(iter); // overflow check
     469
     470    JITStubCall stubCall(this, cti_op_post_inc);
     471    stubCall.addArgument(srcDst);
     472    stubCall.addArgument(Imm32(srcDst));
     473    stubCall.call(dst);
     474}
     475
     476// PostDec (i--)
     477
// PostDec (i--): dst receives the old value, srcDst the decremented one.
// Mirrors emit_op_post_inc with branchSub32.
void JIT::emit_op_post_dec(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned srcDst = currentInstruction[2].u.operand;

    emitLoad(srcDst, regT1, regT0); // tag -> regT1, payload -> regT0
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));

    if (dst == srcDst) // x = x-- is a noop for ints.
        return;

    emitStoreInt32(dst, regT0); // store the pre-decrement value

    addSlowCase(branchSub32(Overflow, Imm32(1), regT0));
    emitStoreInt32(srcDst, regT0, true);
}
     494
     495void JIT::emitSlow_op_post_dec(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     496{
     497    unsigned dst = currentInstruction[1].u.operand;
     498    unsigned srcDst = currentInstruction[2].u.operand;
     499
     500    linkSlowCase(iter); // int32 check
     501    if (dst != srcDst)
     502        linkSlowCase(iter); // overflow check
     503
     504    JITStubCall stubCall(this, cti_op_post_dec);
     505    stubCall.addArgument(srcDst);
     506    stubCall.addArgument(Imm32(srcDst));
     507    stubCall.call(dst);
     508}
     509
     510// PreInc (++i)
     511
// PreInc (++i): increment srcDst in place; bails on non-int32 or overflow.
void JIT::emit_op_pre_inc(Instruction* currentInstruction)
{
    unsigned srcDst = currentInstruction[1].u.operand;

    emitLoad(srcDst, regT1, regT0); // tag -> regT1, payload -> regT0

    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branchAdd32(Overflow, Imm32(1), regT0));
    emitStoreInt32(srcDst, regT0, true);
}
     522
     523void JIT::emitSlow_op_pre_inc(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     524{
     525    unsigned srcDst = currentInstruction[1].u.operand;
     526
     527    linkSlowCase(iter); // int32 check
     528    linkSlowCase(iter); // overflow check
     529
     530    JITStubCall stubCall(this, cti_op_pre_inc);
     531    stubCall.addArgument(srcDst);
     532    stubCall.call(srcDst);
     533}
     534
     535// PreDec (--i)
     536
// PreDec (--i): decrement srcDst in place; bails on non-int32 or overflow.
void JIT::emit_op_pre_dec(Instruction* currentInstruction)
{
    unsigned srcDst = currentInstruction[1].u.operand;

    emitLoad(srcDst, regT1, regT0); // tag -> regT1, payload -> regT0

    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branchSub32(Overflow, Imm32(1), regT0));
    emitStoreInt32(srcDst, regT0, true);
}
     547
     548void JIT::emitSlow_op_pre_dec(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     549{
     550    unsigned srcDst = currentInstruction[1].u.operand;
     551
     552    linkSlowCase(iter); // int32 check
     553    linkSlowCase(iter); // overflow check
     554
     555    JITStubCall stubCall(this, cti_op_pre_dec);
     556    stubCall.addArgument(srcDst);
     557    stubCall.call(srcDst);
     558}
     559
     560// Addition (+)
     561
// Addition (+): int32 fast path with overflow bailout; non-int32 operands
// fall through to the shared double path when an FPU is available. 'types'
// carries the compiler's static knowledge of the operand types, used to
// elide type checks for operands known to be numbers.
void JIT::emit_op_add(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

    JumpList notInt32Op1;
    JumpList notInt32Op2;

    unsigned op;
    int32_t constant;
    if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
        // One operand is a constant int32: use the specialized emitter.
        emitAdd32Constant(dst, op, constant, op == op1 ? types.first() : types.second());
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));

    // Int32 case.
    addSlowCase(branchAdd32(Overflow, regT2, regT0));
    emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));

    if (!supportsFloatingPoint()) {
        // No FPU: any non-int32 operand goes to the slow case.
        addSlowCase(notInt32Op1);
        addSlowCase(notInt32Op2);
        return;
    }
    Jump end = jump();

    // Double case.
    emitBinaryDoubleOp(op_add, dst, op1, op2, types, notInt32Op1, notInt32Op2);
    end.link(this);
}
     598
// Add a constant int32 to operand 'op', storing into 'dst'. Int32 fast path
// with overflow bailout; if 'op' turns out to be a double, the constant is
// converted and the addition is done in floating point. 'opType' is the
// static type of 'op', used to skip the is-number check when it is known.
void JIT::emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType)
{
    // Int32 case.
    emitLoad(op, regT1, regT0); // tag -> regT1, payload -> regT0
    Jump notInt32 = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    addSlowCase(branchAdd32(Overflow, Imm32(constant), regT0));
    emitStoreInt32(dst, regT0, (op == dst));

    // Double case.
    if (!supportsFloatingPoint()) {
        addSlowCase(notInt32);
        return;
    }
    Jump end = jump();

    notInt32.link(this);
    // Tags above LowestTag are non-double values -> slow case.
    if (!opType.definitelyIsNumber())
        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
    move(Imm32(constant), regT2);
    convertInt32ToDouble(regT2, fpRegT0);
    emitLoadDouble(op, fpRegT1);
    addDouble(fpRegT1, fpRegT0); // fpRegT0 = op + constant
    emitStoreDouble(dst, fpRegT0);

    end.link(this);
}
     625
// Slow path for op_add. The linkSlowCase ladder must re-link, in exact
// order, the slow cases registered by emit_op_add / emitAdd32Constant /
// emitBinaryDoubleOp — which of them exist depends on the constant-operand
// shape, FPU availability, and the statically known operand types.
void JIT::emitSlow_op_add(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

    unsigned op;
    int32_t constant;
    if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
        // Constant path: emitAdd32Constant registered these.
        linkSlowCase(iter); // overflow check

        if (!supportsFloatingPoint()) {
            linkSlowCase(iter); // non-sse case
            return;
        }

        ResultType opType = op == op1 ? types.first() : types.second();
        if (!opType.definitelyIsNumber())
            linkSlowCase(iter); // double check
    } else {
        linkSlowCase(iter); // overflow check

        if (!supportsFloatingPoint()) {
            linkSlowCase(iter); // int32 check
            linkSlowCase(iter); // int32 check
        } else {
            // These correspond to the checks emitBinaryDoubleOp emitted.
            if (!types.first().definitelyIsNumber())
                linkSlowCase(iter); // double check

            if (!types.second().definitelyIsNumber()) {
                linkSlowCase(iter); // int32 check
                linkSlowCase(iter); // double check
            }
        }
    }

    JITStubCall stubCall(this, cti_op_add);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call(dst);
}
     668
     669// Subtraction (-)
     670
// Subtraction (-): int32 fast path with overflow bailout; a constant right
// operand uses the specialized emitter, and non-int32 operands fall through
// to the shared double path when an FPU is available.
void JIT::emit_op_sub(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

    JumpList notInt32Op1;
    JumpList notInt32Op2;

    if (isOperandConstantImmediateInt(op2)) {
        // op1 - constant: use the specialized emitter.
        emitSub32Constant(dst, op1, getConstantOperand(op2).asInt32(), types.first());
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));

    // Int32 case.
    addSlowCase(branchSub32(Overflow, regT2, regT0));
    emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));

    if (!supportsFloatingPoint()) {
        // No FPU: any non-int32 operand goes to the slow case.
        addSlowCase(notInt32Op1);
        addSlowCase(notInt32Op2);
        return;
    }
    Jump end = jump();

    // Double case.
    emitBinaryDoubleOp(op_sub, dst, op1, op2, types, notInt32Op1, notInt32Op2);
    end.link(this);
}
     705
// Subtract a constant int32 from operand 'op', storing into 'dst'. Int32
// fast path with overflow bailout; if 'op' is a double, the constant is
// converted and the subtraction is done in floating point. 'opType' is the
// static type of 'op', used to skip the is-number check when it is known.
void JIT::emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType)
{
    // Int32 case.
    emitLoad(op, regT1, regT0); // tag -> regT1, payload -> regT0
    Jump notInt32 = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    addSlowCase(branchSub32(Overflow, Imm32(constant), regT0));
    emitStoreInt32(dst, regT0, (op == dst));

    // Double case.
    if (!supportsFloatingPoint()) {
        addSlowCase(notInt32);
        return;
    }
    Jump end = jump();

    notInt32.link(this);
    // Tags above LowestTag are non-double values -> slow case.
    if (!opType.definitelyIsNumber())
        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
    move(Imm32(constant), regT2);
    convertInt32ToDouble(regT2, fpRegT0);
    emitLoadDouble(op, fpRegT1);
    subDouble(fpRegT0, fpRegT1); // fpRegT1 = op - constant
    emitStoreDouble(dst, fpRegT1);

    end.link(this);
}
     732
// Slow path for op_sub. Mirrors emitSlow_op_add's ladder; the constant case
// corresponds to emitSub32Constant (constant right operand only).
void JIT::emitSlow_op_sub(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

    if (isOperandConstantImmediateInt(op2)) {
        // Constant path: emitSub32Constant registered these.
        linkSlowCase(iter); // overflow check

        if (!supportsFloatingPoint() || !types.first().definitelyIsNumber())
            linkSlowCase(iter); // int32 or double check
    } else {
        linkSlowCase(iter); // overflow check

        if (!supportsFloatingPoint()) {
            linkSlowCase(iter); // int32 check
            linkSlowCase(iter); // int32 check
        } else {
            // These correspond to the checks emitBinaryDoubleOp emitted.
            if (!types.first().definitelyIsNumber())
                linkSlowCase(iter); // double check

            if (!types.second().definitelyIsNumber()) {
                linkSlowCase(iter); // int32 check
                linkSlowCase(iter); // double check
            }
        }
    }

    JITStubCall stubCall(this, cti_op_sub);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call(dst);
}
     767
// Shared double-precision path for add/sub/mul/div and the fused
// compare-and-branch ops jnless/jnlesseq. For the arithmetic ops 'dst' is
// the destination register index; for the jump ops the callers pass the
// branch target in the dst slot and the addJump below applies the '+ 3'
// offset. notInt32Op1/notInt32Op2 are the fast path's bail-out jumps: a
// non-empty list means that operand may not be an int32. op1IsInRegisters /
// op2IsInRegisters indicate whether the operand's tag/payload already sit
// in (regT1, regT0) / (regT3, regT2). Any slow cases added here must be
// re-linked, in the same order, by the callers' emitSlow_* functions.
void JIT::emitBinaryDoubleOp(OpcodeID opcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes types, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters, bool op2IsInRegisters)
{
    JumpList end;

    if (!notInt32Op1.empty()) {
        // Double case 1: Op1 is not int32; Op2 is unknown.
        notInt32Op1.link(this);

        ASSERT(op1IsInRegisters);

        // Verify Op1 is double (tags above LowestTag are not doubles).
        if (!types.first().definitelyIsNumber())
            addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        if (!op2IsInRegisters)
            emitLoad(op2, regT3, regT2);

        // Tags below LowestTag denote doubles; otherwise op2 must be int32.
        Jump doubleOp2 = branch32(Below, regT3, Imm32(JSValue::LowestTag));

        if (!types.second().definitelyIsNumber())
            addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));

        convertInt32ToDouble(regT2, fpRegT0); // int32 op2 -> double in fpRegT0
        Jump doTheMath = jump();

        // Load Op2 as double into double register.
        doubleOp2.link(this);
        emitLoadDouble(op2, fpRegT0);

        // Do the math. fpRegT0 holds op2 in all cases below.
        doTheMath.link(this);
        switch (opcodeID) {
            case op_mul:
                emitLoadDouble(op1, fpRegT2);
                mulDouble(fpRegT2, fpRegT0);
                emitStoreDouble(dst, fpRegT0);
                break;
            case op_add:
                emitLoadDouble(op1, fpRegT2);
                addDouble(fpRegT2, fpRegT0);
                emitStoreDouble(dst, fpRegT0);
                break;
            case op_sub:
                emitLoadDouble(op1, fpRegT1);
                subDouble(fpRegT0, fpRegT1); // fpRegT1 = op1 - op2
                emitStoreDouble(dst, fpRegT1);
                break;
            case op_div:
                emitLoadDouble(op1, fpRegT1);
                divDouble(fpRegT0, fpRegT1); // fpRegT1 = op1 / op2
                emitStoreDouble(dst, fpRegT1);
                break;
            case op_jnless:
                // Jump (to target in 'dst') when !(op1 < op2), i.e. op2 <= op1.
                emitLoadDouble(op1, fpRegT2);
                addJump(branchDouble(DoubleLessThanOrEqual, fpRegT0, fpRegT2), dst + 3);
                break;
            case op_jnlesseq:
                // Jump when !(op1 <= op2), i.e. op2 < op1.
                emitLoadDouble(op1, fpRegT2);
                addJump(branchDouble(DoubleLessThan, fpRegT0, fpRegT2), dst + 3);
                break;
            default:
                ASSERT_NOT_REACHED();
        }

        if (!notInt32Op2.empty())
            end.append(jump());
    }

    if (!notInt32Op2.empty()) {
        // Double case 2: Op1 is int32; Op2 is not int32.
        notInt32Op2.link(this);

        ASSERT(op2IsInRegisters);

        if (!op1IsInRegisters)
            emitLoadPayload(op1, regT0);

        convertInt32ToDouble(regT0, fpRegT0); // int32 op1 -> double in fpRegT0

        // Verify op2 is double (tags above LowestTag are not doubles).
        if (!types.second().definitelyIsNumber())
            addSlowCase(branch32(Above, regT3, Imm32(JSValue::LowestTag)));

        // Do the math. fpRegT0 holds op1 in all cases below.
        switch (opcodeID) {
            case op_mul:
                emitLoadDouble(op2, fpRegT2);
                mulDouble(fpRegT2, fpRegT0);
                emitStoreDouble(dst, fpRegT0);
                break;
            case op_add:
                emitLoadDouble(op2, fpRegT2);
                addDouble(fpRegT2, fpRegT0);
                emitStoreDouble(dst, fpRegT0);
                break;
            case op_sub:
                emitLoadDouble(op2, fpRegT2);
                subDouble(fpRegT2, fpRegT0); // fpRegT0 = op1 - op2
                emitStoreDouble(dst, fpRegT0);
                break;
            case op_div:
                emitLoadDouble(op2, fpRegT2);
                divDouble(fpRegT2, fpRegT0); // fpRegT0 = op1 / op2
                emitStoreDouble(dst, fpRegT0);
                break;
            case op_jnless:
                // Jump when !(op1 < op2), i.e. op2 <= op1.
                emitLoadDouble(op2, fpRegT1);
                addJump(branchDouble(DoubleLessThanOrEqual, fpRegT1, fpRegT0), dst + 3);
                break;
            case op_jnlesseq:
                // Jump when !(op1 <= op2), i.e. op2 < op1.
                emitLoadDouble(op2, fpRegT1);
                addJump(branchDouble(DoubleLessThan, fpRegT1, fpRegT0), dst + 3);
                break;
            default:
                ASSERT_NOT_REACHED();
        }
    }

    end.link(this);
}
     888
     889// Multiplication (*)
     890
// Fast path for op_mul under the JSVALUE32_64 value representation.
// Multiplies two int32 operands inline; non-int32 operands fall through to
// emitBinaryDoubleOp (or straight to the slow case when the CPU has no FP).
void JIT::emit_op_mul(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;  // destination virtual register
    unsigned op1 = currentInstruction[2].u.operand;  // left operand
    unsigned op2 = currentInstruction[3].u.operand;  // right operand
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

    JumpList notInt32Op1;
    JumpList notInt32Op2;

    // Tags land in regT1/regT3, payloads in regT0/regT2.
    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));

    // Int32 case.
    // Save op1's payload in regT3: the multiply below clobbers regT0, and
    // emitSlow_op_mul re-reads the operand signs (regT2/regT3) to decide
    // whether a zero product is really -0.
    move(regT0, regT3);
    addSlowCase(branchMul32(Overflow, regT2, regT0));
    // A zero result also takes the slow path, since it may need to be -0.
    addSlowCase(branchTest32(Zero, regT0));
    emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));

    if (!supportsFloatingPoint()) {
        // No FP support: non-int32 operands always go to the stub.
        addSlowCase(notInt32Op1);
        addSlowCase(notInt32Op2);
        return;
    }
    Jump end = jump();

    // Double case.
    emitBinaryDoubleOp(op_mul, dst, op1, op2, types, notInt32Op1, notInt32Op2);
    end.link(this);
}
     922
// Slow path for op_mul. The first two slow cases (overflow, zero result)
// come from the int32 fast path. A zero product is genuinely +0 only when
// neither operand is negative; otherwise the stub must produce -0.
void JIT::emitSlow_op_mul(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

    Jump overflow = getSlowCase(iter); // overflow check
    linkSlowCase(iter); // zero result check

    // regT2 still holds op2's payload; regT3 holds op1's payload (saved by
    // emit_op_mul before the multiply clobbered regT0). If neither sign bit
    // is set, the zero result is a true +0 and can be stored inline.
    Jump negZero = branchOr32(Signed, regT2, regT3);
    emitStoreInt32(dst, Imm32(0), (op1 == dst || op2 == dst));

    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_mul));

    negZero.link(this);
    overflow.link(this);

    // Link the remaining slow cases in the same order they were registered
    // by emit_op_mul / emitBinaryDoubleOp.
    if (!supportsFloatingPoint()) {
        linkSlowCase(iter); // int32 check
        linkSlowCase(iter); // int32 check
    }

    if (supportsFloatingPoint()) {
        if (!types.first().definitelyIsNumber())
            linkSlowCase(iter); // double check

        if (!types.second().definitelyIsNumber()) {
            linkSlowCase(iter); // int32 check
            linkSlowCase(iter); // double check
        }
    }

    // NOTE(review): the jitStubCall label is not referenced anywhere in this
    // function — presumably left over; confirm before removing.
    Label jitStubCall(this);
    JITStubCall stubCall(this, cti_op_mul);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call(dst);
}
     962
     963// Division (/)
     964
// Fast path for op_div under JSVALUE32_64. The division itself is always
// done in double precision; when both operands were int32 we then try to
// canonicalize an exact, non-zero integral quotient back to an int32 value.
void JIT::emit_op_div(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

    // Without FP support every op_div goes to the stub.
    if (!supportsFloatingPoint()) {
        addSlowCase(jump());
        return;
    }

    // Int32 divide.
    JumpList notInt32Op1;
    JumpList notInt32Op2;

    JumpList end;

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);

    notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));

    // Both operands are int32: convert each to double and divide.
    convertInt32ToDouble(regT0, fpRegT0);
    convertInt32ToDouble(regT2, fpRegT1);
    divDouble(fpRegT1, fpRegT0);

    JumpList doubleResult;
    // When op1 is a constant int32 <= 1 the result is always kept as a
    // double — presumably to preserve -0 / fractional reciprocals from the
    // common "1 / x" idiom; TODO(review) confirm the intent.
    if (!isOperandConstantImmediateInt(op1) || getConstantOperand(op1).asInt32() > 1) {
        // Truncate the quotient to int32 and convert back; if the compare
        // shows it round-trips exactly, the quotient is integral.
        m_assembler.cvttsd2si_rr(fpRegT0, regT0);
        convertInt32ToDouble(regT0, fpRegT1);
        m_assembler.ucomisd_rr(fpRegT1, fpRegT0);

        doubleResult.append(m_assembler.jne()); // not equal => fractional
        doubleResult.append(m_assembler.jp());  // unordered (NaN)

        // A zero quotient stays a double: it could be negative zero.
        doubleResult.append(branchTest32(Zero, regT0));

        // Int32 result.
        emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
        end.append(jump());
    }

    // Double result.
    doubleResult.link(this);
    emitStoreDouble(dst, fpRegT0);
    end.append(jump());

    // Double divide.
    emitBinaryDoubleOp(op_div, dst, op1, op2, types, notInt32Op1, notInt32Op2);
    end.link(this);
}
     1017
// Slow path for op_div: link the slow cases registered by emit_op_div /
// emitBinaryDoubleOp (in registration order), then call the cti_op_div stub.
void JIT::emitSlow_op_div(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;
    OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);

    if (!supportsFloatingPoint())
        linkSlowCase(iter); // no-FP case: emit_op_div unconditionally slow-cased
    else {
        if (!types.first().definitelyIsNumber())
            linkSlowCase(iter); // double check

        if (!types.second().definitelyIsNumber()) {
            linkSlowCase(iter); // int32 check
            linkSlowCase(iter); // double check
        }
    }

    JITStubCall stubCall(this, cti_op_div);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call(dst);
}
     1042
     1043// Mod (%)
     1044
     1045/* ------------------------------ BEGIN: OP_MOD ------------------------------ */
     1046
     1047#if PLATFORM(X86) || PLATFORM(X86_64)
     1048
// Fast path for op_mod on x86: idivl leaves the remainder in edx, which
// fixes the register assignments (cdq/idiv implicitly use eax/edx).
void JIT::emit_op_mod(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    if (isOperandConstantImmediateInt(op2) && getConstantOperand(op2).asInt32() != 0) {
        // Known non-zero constant divisor: no op2 type check and no
        // divide-by-zero check; the INT_MIN check is only needed for -1.
        emitLoad(op1, X86::edx, X86::eax);
        move(Imm32(getConstantOperand(op2).asInt32()), X86::ecx);
        addSlowCase(branch32(NotEqual, X86::edx, Imm32(JSValue::Int32Tag)));
        if (getConstantOperand(op2).asInt32() == -1)
            addSlowCase(branch32(Equal, X86::eax, Imm32(0x80000000))); // -2147483648 / -1 => EXC_ARITHMETIC
    } else {
        emitLoad2(op1, X86::edx, X86::eax, op2, X86::ebx, X86::ecx);
        addSlowCase(branch32(NotEqual, X86::edx, Imm32(JSValue::Int32Tag)));
        addSlowCase(branch32(NotEqual, X86::ebx, Imm32(JSValue::Int32Tag)));

        // Conservatively slow-case ANY INT_MIN dividend (not just with a -1
        // divisor), then any zero divisor.
        addSlowCase(branch32(Equal, X86::eax, Imm32(0x80000000))); // -2147483648 / -1 => EXC_ARITHMETIC
        addSlowCase(branch32(Equal, X86::ecx, Imm32(0))); // divide by 0
    }

    move(X86::eax, X86::ebx); // Save dividend payload, in case of 0.
    m_assembler.cdq();        // Sign-extend eax into edx:eax for idiv.
    m_assembler.idivl_r(X86::ecx);

    // If the remainder is zero and the dividend is negative, the result is -0.
    Jump storeResult1 = branchTest32(NonZero, X86::edx);
    Jump storeResult2 = branchTest32(Zero, X86::ebx, Imm32(0x80000000)); // not negative
    emitStore(dst, jsNumber(m_globalData, -0.0));
    Jump end = jump();

    storeResult1.link(this);
    storeResult2.link(this);
    emitStoreInt32(dst, X86::edx, (op1 == dst || op2 == dst));
    end.link(this);
}
     1085
// Slow path for op_mod (x86): link each slow case registered by emit_op_mod
// in registration order, then call the cti_op_mod stub.
// Note: emit_op_mod registers the INT_MIN (0x80000000) check BEFORE the
// divide-by-zero check; the original comments here had the two swapped.
void JIT::emitSlow_op_mod(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    if (isOperandConstantImmediateInt(op2) && getConstantOperand(op2).asInt32() != 0) {
        linkSlowCase(iter); // op1 int32 check
        if (getConstantOperand(op2).asInt32() == -1)
            linkSlowCase(iter); // 0x80000000 check
    } else {
        linkSlowCase(iter); // op1 int32 check
        linkSlowCase(iter); // op2 int32 check
        linkSlowCase(iter); // 0x80000000 check (registered before the 0 check)
        linkSlowCase(iter); // 0 check
    }

    JITStubCall stubCall(this, cti_op_mod);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call(dst);
}
     1108
     1109#else // PLATFORM(X86) || PLATFORM(X86_64)
     1110
     1111void JIT::emit_op_mod(Instruction* currentInstruction)
     1112{
     1113    unsigned dst = currentInstruction[1].u.operand;
     1114    unsigned op1 = currentInstruction[2].u.operand;
     1115    unsigned op2 = currentInstruction[3].u.operand;
     1116
     1117    JITStubCall stubCall(this, cti_op_mod);
     1118    stubCall.addArgument(op1);
     1119    stubCall.addArgument(op2);
     1120    stubCall.call(dst);
     1121}
     1122
void JIT::emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&)
{
    // The generic emit_op_mod registers no slow cases (it always calls the
    // stub directly), so there is nothing to link here.
}
     1126
     1127#endif // PLATFORM(X86) || PLATFORM(X86_64)
     1128
     1129/* ------------------------------ END: OP_MOD ------------------------------ */
     1130
     1131#else // USE(JSVALUE32_64)
    481132
    491133void JIT::emit_op_lshift(Instruction* currentInstruction)
     
    651149#endif
    661150    lshift32(regT2, regT0);
    67 #if !USE(ALTERNATE_JSIMMEDIATE)
     1151#if !USE(JSVALUE64)
    681152    addSlowCase(branchAdd32(Overflow, regT0, regT0));
    691153    signExtend32ToPtr(regT0, regT0);
     
    791163    unsigned op2 = currentInstruction[3].u.operand;
    801164
    81 #if USE(ALTERNATE_JSIMMEDIATE)
     1165#if USE(JSVALUE64)
    821166    UNUSED_PARAM(op1);
    831167    UNUSED_PARAM(op2);
     
    931177    notImm2.link(this);
    941178#endif
    95     JITStubCall stubCall(this, JITStubs::cti_op_lshift);
     1179    JITStubCall stubCall(this, cti_op_lshift);
    961180    stubCall.addArgument(regT0);
    971181    stubCall.addArgument(regT2);
     
    1101194        emitJumpSlowCaseIfNotImmediateInteger(regT0);
    1111195        // Mask with 0x1f as per ecma-262 11.7.2 step 7.
    112 #if USE(ALTERNATE_JSIMMEDIATE)
     1196#if USE(JSVALUE64)
    1131197        rshift32(Imm32(getConstantOperandImmediateInt(op2) & 0x1f), regT0);
    1141198#else
     
    1191203        if (supportsFloatingPointTruncate()) {
    1201204            Jump lhsIsInt = emitJumpIfImmediateInteger(regT0);
    121 #if USE(ALTERNATE_JSIMMEDIATE)
    122             // supportsFloatingPoint() && USE(ALTERNATE_JSIMMEDIATE) => 3 SlowCases
     1205#if USE(JSVALUE64)
     1206            // supportsFloatingPoint() && USE(JSVALUE64) => 3 SlowCases
    1231207            addSlowCase(emitJumpIfNotImmediateNumber(regT0));
    1241208            addPtr(tagTypeNumberRegister, regT0);
     
    1261210            addSlowCase(branchTruncateDoubleToInt32(fpRegT0, regT0));
    1271211#else
    128             // supportsFloatingPoint() && !USE(ALTERNATE_JSIMMEDIATE) => 5 SlowCases (of which 1 IfNotJSCell)
     1212            // supportsFloatingPoint() && !USE(JSVALUE64) => 5 SlowCases (of which 1 IfNotJSCell)
    1291213            emitJumpSlowCaseIfNotJSCell(regT0, op1);
    1301214            addSlowCase(checkStructure(regT0, m_globalData->numberStructure.get()));
     
    1461230        and32(Imm32(0x1f), regT2);
    1471231#endif
    148 #if USE(ALTERNATE_JSIMMEDIATE)
     1232#if USE(JSVALUE64)
    1491233        rshift32(regT2, regT0);
    1501234#else
     
    1521236#endif
    1531237    }
    154 #if USE(ALTERNATE_JSIMMEDIATE)
     1238#if USE(JSVALUE64)
    1551239    emitFastArithIntToImmNoCheck(regT0, regT0);
    1561240#else
     
    1661250    unsigned op2 = currentInstruction[3].u.operand;
    1671251
    168     JITStubCall stubCall(this, JITStubs::cti_op_rshift);
     1252    JITStubCall stubCall(this, cti_op_rshift);
    1691253
    1701254    if (isOperandConstantImmediateInt(op2)) {
     
    1741258    } else {
    1751259        if (supportsFloatingPointTruncate()) {
    176 #if USE(ALTERNATE_JSIMMEDIATE)
     1260#if USE(JSVALUE64)
    1771261            linkSlowCase(iter);
    1781262            linkSlowCase(iter);
     
    2151299        emitGetVirtualRegister(op1, regT0);
    2161300        emitJumpSlowCaseIfNotImmediateInteger(regT0);
    217 #if USE(ALTERNATE_JSIMMEDIATE)
     1301#if USE(JSVALUE64)
    2181302        int32_t op2imm = getConstantOperandImmediateInt(op2);
    2191303#else
     
    2241308        emitGetVirtualRegister(op2, regT1);
    2251309        emitJumpSlowCaseIfNotImmediateInteger(regT1);
    226 #if USE(ALTERNATE_JSIMMEDIATE)
     1310#if USE(JSVALUE64)
    2271311        int32_t op1imm = getConstantOperandImmediateInt(op1);
    2281312#else
     
    2541338
    2551339        if (supportsFloatingPoint()) {
    256 #if USE(ALTERNATE_JSIMMEDIATE)
     1340#if USE(JSVALUE64)
    2571341            Jump fail1 = emitJumpIfNotImmediateNumber(regT0);
    2581342            addPtr(tagTypeNumberRegister, regT0);
     
    2671351#endif
    2681352           
    269             int32_t op2imm = getConstantOperand(op2).getInt32Fast();;
     1353            int32_t op2imm = getConstantOperand(op2).asInt32();;
    2701354                   
    2711355            move(Imm32(op2imm), regT1);
     
    2761360            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jnless));
    2771361
    278 #if USE(ALTERNATE_JSIMMEDIATE)
     1362#if USE(JSVALUE64)
    2791363            fail1.link(this);
    2801364#else
     
    2851369        }
    2861370
    287         JITStubCall stubCall(this, JITStubs::cti_op_jless);
     1371        JITStubCall stubCall(this, cti_op_jless);
    2881372        stubCall.addArgument(regT0);
    2891373        stubCall.addArgument(op2, regT2);
     
    2951379
    2961380        if (supportsFloatingPoint()) {
    297 #if USE(ALTERNATE_JSIMMEDIATE)
     1381#if USE(JSVALUE64)
    2981382            Jump fail1 = emitJumpIfNotImmediateNumber(regT1);
    2991383            addPtr(tagTypeNumberRegister, regT1);
     
    3081392#endif
    3091393           
    310             int32_t op1imm = getConstantOperand(op1).getInt32Fast();;
     1394            int32_t op1imm = getConstantOperand(op1).asInt32();;
    3111395                   
    3121396            move(Imm32(op1imm), regT0);
     
    3171401            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jnless));
    3181402
    319 #if USE(ALTERNATE_JSIMMEDIATE)
     1403#if USE(JSVALUE64)
    3201404            fail1.link(this);
    3211405#else
     
    3261410        }
    3271411
    328         JITStubCall stubCall(this, JITStubs::cti_op_jless);
     1412        JITStubCall stubCall(this, cti_op_jless);
    3291413        stubCall.addArgument(op1, regT2);
    3301414        stubCall.addArgument(regT1);
     
    3361420
    3371421        if (supportsFloatingPoint()) {
    338 #if USE(ALTERNATE_JSIMMEDIATE)
     1422#if USE(JSVALUE64)
    3391423            Jump fail1 = emitJumpIfNotImmediateNumber(regT0);
    3401424            Jump fail2 = emitJumpIfNotImmediateNumber(regT1);
     
    3631447            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jnless));
    3641448
    365 #if USE(ALTERNATE_JSIMMEDIATE)
     1449#if USE(JSVALUE64)
    3661450            fail1.link(this);
    3671451            fail2.link(this);
     
    3781462
    3791463        linkSlowCase(iter);
    380         JITStubCall stubCall(this, JITStubs::cti_op_jless);
     1464        JITStubCall stubCall(this, cti_op_jless);
    3811465        stubCall.addArgument(regT0);
    3821466        stubCall.addArgument(regT1);
     
    4001484        emitGetVirtualRegister(op1, regT0);
    4011485        emitJumpSlowCaseIfNotImmediateInteger(regT0);
    402 #if USE(ALTERNATE_JSIMMEDIATE)
     1486#if USE(JSVALUE64)
    4031487        int32_t op2imm = getConstantOperandImmediateInt(op2);
    4041488#else
     
    4091493        emitGetVirtualRegister(op2, regT1);
    4101494        emitJumpSlowCaseIfNotImmediateInteger(regT1);
    411 #if USE(ALTERNATE_JSIMMEDIATE)
     1495#if USE(JSVALUE64)
    4121496        int32_t op1imm = getConstantOperandImmediateInt(op1);
    4131497#else
     
    4391523
    4401524        if (supportsFloatingPoint()) {
    441 #if USE(ALTERNATE_JSIMMEDIATE)
     1525#if USE(JSVALUE64)
    4421526            Jump fail1 = emitJumpIfNotImmediateNumber(regT0);
    4431527            addPtr(tagTypeNumberRegister, regT0);
     
    4521536#endif
    4531537           
    454             int32_t op2imm = getConstantOperand(op2).getInt32Fast();;
     1538            int32_t op2imm = getConstantOperand(op2).asInt32();;
    4551539                   
    4561540            move(Imm32(op2imm), regT1);
     
    4611545            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jnlesseq));
    4621546
    463 #if USE(ALTERNATE_JSIMMEDIATE)
     1547#if USE(JSVALUE64)
    4641548            fail1.link(this);
    4651549#else
     
    4701554        }
    4711555
    472         JITStubCall stubCall(this, JITStubs::cti_op_jlesseq);
     1556        JITStubCall stubCall(this, cti_op_jlesseq);
    4731557        stubCall.addArgument(regT0);
    4741558        stubCall.addArgument(op2, regT2);
     
    4801564
    4811565        if (supportsFloatingPoint()) {
    482 #if USE(ALTERNATE_JSIMMEDIATE)
     1566#if USE(JSVALUE64)
    4831567            Jump fail1 = emitJumpIfNotImmediateNumber(regT1);
    4841568            addPtr(tagTypeNumberRegister, regT1);
     
    4931577#endif
    4941578           
    495             int32_t op1imm = getConstantOperand(op1).getInt32Fast();;
     1579            int32_t op1imm = getConstantOperand(op1).asInt32();;
    4961580                   
    4971581            move(Imm32(op1imm), regT0);
     
    5021586            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jnlesseq));
    5031587
    504 #if USE(ALTERNATE_JSIMMEDIATE)
     1588#if USE(JSVALUE64)
    5051589            fail1.link(this);
    5061590#else
     
    5111595        }
    5121596
    513         JITStubCall stubCall(this, JITStubs::cti_op_jlesseq);
     1597        JITStubCall stubCall(this, cti_op_jlesseq);
    5141598        stubCall.addArgument(op1, regT2);
    5151599        stubCall.addArgument(regT1);
     
    5211605
    5221606        if (supportsFloatingPoint()) {
    523 #if USE(ALTERNATE_JSIMMEDIATE)
     1607#if USE(JSVALUE64)
    5241608            Jump fail1 = emitJumpIfNotImmediateNumber(regT0);
    5251609            Jump fail2 = emitJumpIfNotImmediateNumber(regT1);
     
    5481632            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jnlesseq));
    5491633
    550 #if USE(ALTERNATE_JSIMMEDIATE)
     1634#if USE(JSVALUE64)
    5511635            fail1.link(this);
    5521636            fail2.link(this);
     
    5631647
    5641648        linkSlowCase(iter);
    565         JITStubCall stubCall(this, JITStubs::cti_op_jlesseq);
     1649        JITStubCall stubCall(this, cti_op_jlesseq);
    5661650        stubCall.addArgument(regT0);
    5671651        stubCall.addArgument(regT1);
     
    5801664        emitGetVirtualRegister(op2, regT0);
    5811665        emitJumpSlowCaseIfNotImmediateInteger(regT0);
    582 #if USE(ALTERNATE_JSIMMEDIATE)
     1666#if USE(JSVALUE64)
    5831667        int32_t imm = getConstantOperandImmediateInt(op1);
    5841668        andPtr(Imm32(imm), regT0);
     
    5911675        emitGetVirtualRegister(op1, regT0);
    5921676        emitJumpSlowCaseIfNotImmediateInteger(regT0);
    593 #if USE(ALTERNATE_JSIMMEDIATE)
     1677#if USE(JSVALUE64)
    5941678        int32_t imm = getConstantOperandImmediateInt(op2);
    5951679        andPtr(Imm32(imm), regT0);
     
    6151699    linkSlowCase(iter);
    6161700    if (isOperandConstantImmediateInt(op1)) {
    617         JITStubCall stubCall(this, JITStubs::cti_op_bitand);
     1701        JITStubCall stubCall(this, cti_op_bitand);
    6181702        stubCall.addArgument(op1, regT2);
    6191703        stubCall.addArgument(regT0);
    6201704        stubCall.call(result);
    6211705    } else if (isOperandConstantImmediateInt(op2)) {
    622         JITStubCall stubCall(this, JITStubs::cti_op_bitand);
     1706        JITStubCall stubCall(this, cti_op_bitand);
    6231707        stubCall.addArgument(regT0);
    6241708        stubCall.addArgument(op2, regT2);
    6251709        stubCall.call(result);
    6261710    } else {
    627         JITStubCall stubCall(this, JITStubs::cti_op_bitand);
     1711        JITStubCall stubCall(this, cti_op_bitand);
    6281712        stubCall.addArgument(op1, regT2);
    6291713        stubCall.addArgument(regT1);
     
    6401724    move(regT0, regT1);
    6411725    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    642 #if USE(ALTERNATE_JSIMMEDIATE)
     1726#if USE(JSVALUE64)
    6431727    addSlowCase(branchAdd32(Overflow, Imm32(1), regT1));
    6441728    emitFastArithIntToImmNoCheck(regT1, regT1);
     
    6581742    linkSlowCase(iter);
    6591743    linkSlowCase(iter);
    660     JITStubCall stubCall(this, JITStubs::cti_op_post_inc);
     1744    JITStubCall stubCall(this, cti_op_post_inc);
    6611745    stubCall.addArgument(regT0);
    6621746    stubCall.addArgument(Imm32(srcDst));
     
    6721756    move(regT0, regT1);
    6731757    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    674 #if USE(ALTERNATE_JSIMMEDIATE)
     1758#if USE(JSVALUE64)
    6751759    addSlowCase(branchSub32(Zero, Imm32(1), regT1));
    6761760    emitFastArithIntToImmNoCheck(regT1, regT1);
     
    6901774    linkSlowCase(iter);
    6911775    linkSlowCase(iter);
    692     JITStubCall stubCall(this, JITStubs::cti_op_post_dec);
     1776    JITStubCall stubCall(this, cti_op_post_dec);
    6931777    stubCall.addArgument(regT0);
    6941778    stubCall.addArgument(Imm32(srcDst));
     
    7021786    emitGetVirtualRegister(srcDst, regT0);
    7031787    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    704 #if USE(ALTERNATE_JSIMMEDIATE)
     1788#if USE(JSVALUE64)
    7051789    addSlowCase(branchAdd32(Overflow, Imm32(1), regT0));
    7061790    emitFastArithIntToImmNoCheck(regT0, regT0);
     
    7201804    emitGetVirtualRegister(srcDst, regT0);
    7211805    notImm.link(this);
    722     JITStubCall stubCall(this, JITStubs::cti_op_pre_inc);
     1806    JITStubCall stubCall(this, cti_op_pre_inc);
    7231807    stubCall.addArgument(regT0);
    7241808    stubCall.call(srcDst);
     
    7311815    emitGetVirtualRegister(srcDst, regT0);
    7321816    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    733 #if USE(ALTERNATE_JSIMMEDIATE)
     1817#if USE(JSVALUE64)
    7341818    addSlowCase(branchSub32(Zero, Imm32(1), regT0));
    7351819    emitFastArithIntToImmNoCheck(regT0, regT0);
     
    7491833    emitGetVirtualRegister(srcDst, regT0);
    7501834    notImm.link(this);
    751     JITStubCall stubCall(this, JITStubs::cti_op_pre_dec);
     1835    JITStubCall stubCall(this, cti_op_pre_dec);
    7521836    stubCall.addArgument(regT0);
    7531837    stubCall.call(srcDst);
     
    7671851    emitJumpSlowCaseIfNotImmediateInteger(X86::eax);
    7681852    emitJumpSlowCaseIfNotImmediateInteger(X86::ecx);
    769 #if USE(ALTERNATE_JSIMMEDIATE)
     1853#if USE(JSVALUE64)
    7701854    addSlowCase(branchPtr(Equal, X86::ecx, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))));
    7711855    m_assembler.cdq();
     
    7861870    unsigned result = currentInstruction[1].u.operand;
    7871871
    788 #if USE(ALTERNATE_JSIMMEDIATE)
     1872#if USE(JSVALUE64)
    7891873    linkSlowCase(iter);
    7901874    linkSlowCase(iter);
     
    7991883    notImm2.link(this);
    8001884#endif
    801     JITStubCall stubCall(this, JITStubs::cti_op_mod);
     1885    JITStubCall stubCall(this, cti_op_mod);
    8021886    stubCall.addArgument(X86::eax);
    8031887    stubCall.addArgument(X86::ecx);
     
    8131897    unsigned op2 = currentInstruction[3].u.operand;
    8141898
    815     JITStubCall stubCall(this, JITStubs::cti_op_mod);
     1899    JITStubCall stubCall(this, cti_op_mod);
    8161900    stubCall.addArgument(op1, regT2);
    8171901    stubCall.addArgument(op2, regT2);
     
    8281912/* ------------------------------ END: OP_MOD ------------------------------ */
    8291913
    830 #if !ENABLE(JIT_OPTIMIZE_ARITHMETIC)
    831 
    832 /* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_ARITHMETIC) (OP_ADD, OP_SUB, OP_MUL) ------------------------------ */
    833 
    834 void JIT::emit_op_add(Instruction* currentInstruction)
    835 {
    836     unsigned result = currentInstruction[1].u.operand;
    837     unsigned op1 = currentInstruction[2].u.operand;
    838     unsigned op2 = currentInstruction[3].u.operand;
    839 
    840     JITStubCall stubCall(this, JITStubs::cti_op_add);
    841     stubCall.addArgument(op1, regT2);
    842     stubCall.addArgument(op2, regT2);
    843     stubCall.call(result);
    844 }
    845 
    846 void JIT::emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&)
    847 {
    848     ASSERT_NOT_REACHED();
    849 }
    850 
    851 void JIT::emit_op_mul(Instruction* currentInstruction)
    852 {
    853     unsigned result = currentInstruction[1].u.operand;
    854     unsigned op1 = currentInstruction[2].u.operand;
    855     unsigned op2 = currentInstruction[3].u.operand;
    856 
    857     JITStubCall stubCall(this, JITStubs::cti_op_mul);
    858     stubCall.addArgument(op1, regT2);
    859     stubCall.addArgument(op2, regT2);
    860     stubCall.call(result);
    861 }
    862 
    863 void JIT::emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&)
    864 {
    865     ASSERT_NOT_REACHED();
    866 }
    867 
    868 void JIT::emit_op_sub(Instruction* currentInstruction)
    869 {
    870     unsigned result = currentInstruction[1].u.operand;
    871     unsigned op1 = currentInstruction[2].u.operand;
    872     unsigned op2 = currentInstruction[3].u.operand;
    873 
    874     JITStubCall stubCall(this, JITStubs::cti_op_sub);
    875     stubCall.addArgument(op1, regT2);
    876     stubCall.addArgument(op2, regT2);
    877     stubCall.call(result);
    878 }
    879 
    880 void JIT::emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&)
    881 {
    882     ASSERT_NOT_REACHED();
    883 }
    884 
    885 #elif USE(ALTERNATE_JSIMMEDIATE) // *AND* ENABLE(JIT_OPTIMIZE_ARITHMETIC)
    886 
    887 /* ------------------------------ BEGIN: USE(ALTERNATE_JSIMMEDIATE) (OP_ADD, OP_SUB, OP_MUL) ------------------------------ */
     1914#if USE(JSVALUE64)
     1915
     1916/* ------------------------------ BEGIN: USE(JSVALUE64) (OP_ADD, OP_SUB, OP_MUL) ------------------------------ */
    8881917
    8891918void JIT::compileBinaryArithOp(OpcodeID opcodeID, unsigned, unsigned op1, unsigned op2, OperandTypes)
     
    9181947
    9191948    Label stubFunctionCall(this);
    920     JITStubCall stubCall(this, opcodeID == op_add ? JITStubs::cti_op_add : opcodeID == op_sub ? JITStubs::cti_op_sub : JITStubs::cti_op_mul);
     1949    JITStubCall stubCall(this, opcodeID == op_add ? cti_op_add : opcodeID == op_sub ? cti_op_sub : cti_op_mul);
    9211950    stubCall.addArgument(regT0);
    9221951    stubCall.addArgument(regT1);
     
    9691998
    9701999    if (!types.first().mightBeNumber() || !types.second().mightBeNumber()) {
    971         JITStubCall stubCall(this, JITStubs::cti_op_add);
     2000        JITStubCall stubCall(this, cti_op_add);
    9722001        stubCall.addArgument(op1, regT2);
    9732002        stubCall.addArgument(op2, regT2);
     
    10012030        linkSlowCase(iter);
    10022031        linkSlowCase(iter);
    1003         JITStubCall stubCall(this, JITStubs::cti_op_add);
     2032        JITStubCall stubCall(this, cti_op_add);
    10042033        stubCall.addArgument(op1, regT2);
    10052034        stubCall.addArgument(op2, regT2);
     
    10462075        linkSlowCase(iter);
    10472076        // There is an extra slow case for (op1 * -N) or (-N * op2), to check for 0 since this should produce a result of -0.
    1048         JITStubCall stubCall(this, JITStubs::cti_op_mul);
     2077        JITStubCall stubCall(this, cti_op_mul);
    10492078        stubCall.addArgument(op1, regT2);
    10502079        stubCall.addArgument(op2, regT2);
     
    10762105}
    10772106
    1078 #else // !ENABLE(JIT_OPTIMIZE_ARITHMETIC)
    1079 
    1080 /* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_ARITHMETIC) (OP_ADD, OP_SUB, OP_MUL) ------------------------------ */
     2107#else // USE(JSVALUE64)
     2108
     2109/* ------------------------------ BEGIN: !USE(JSVALUE64) (OP_ADD, OP_SUB, OP_MUL) ------------------------------ */
    10812110
    10822111void JIT::compileBinaryArithOp(OpcodeID opcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes types)
     
    12452274        linkSlowCase(iter);
    12462275
    1247     JITStubCall stubCall(this, opcodeID == op_add ? JITStubs::cti_op_add : opcodeID == op_sub ? JITStubs::cti_op_sub : JITStubs::cti_op_mul);
     2276    JITStubCall stubCall(this, opcodeID == op_add ? cti_op_add : opcodeID == op_sub ? cti_op_sub : cti_op_mul);
    12482277    stubCall.addArgument(src1, regT2);
    12492278    stubCall.addArgument(src2, regT2);
     
    12742303            compileBinaryArithOp(op_add, result, op1, op2, OperandTypes::fromInt(currentInstruction[4].u.operand));
    12752304        else {
    1276             JITStubCall stubCall(this, JITStubs::cti_op_add);
     2305            JITStubCall stubCall(this, cti_op_add);
    12772306            stubCall.addArgument(op1, regT2);
    12782307            stubCall.addArgument(op2, regT2);
     
    12932322        sub32(Imm32(getConstantOperandImmediateInt(op1) << JSImmediate::IntegerPayloadShift), regT0);
    12942323        notImm.link(this);
    1295         JITStubCall stubCall(this, JITStubs::cti_op_add);
     2324        JITStubCall stubCall(this, cti_op_add);
    12962325        stubCall.addArgument(op1, regT2);
    12972326        stubCall.addArgument(regT0);
     
    13022331        sub32(Imm32(getConstantOperandImmediateInt(op2) << JSImmediate::IntegerPayloadShift), regT0);
    13032332        notImm.link(this);
    1304         JITStubCall stubCall(this, JITStubs::cti_op_add);
     2333        JITStubCall stubCall(this, cti_op_add);
    13052334        stubCall.addArgument(regT0);
    13062335        stubCall.addArgument(op2, regT2);
     
    13522381        linkSlowCase(iter);
    13532382        // There is an extra slow case for (op1 * -N) or (-N * op2), to check for 0 since this should produce a result of -0.
    1354         JITStubCall stubCall(this, JITStubs::cti_op_mul);
     2383        JITStubCall stubCall(this, cti_op_mul);
    13552384        stubCall.addArgument(op1, regT2);
    13562385        stubCall.addArgument(op2, regT2);
     
    13702399}
    13712400
    1372 #endif // !ENABLE(JIT_OPTIMIZE_ARITHMETIC)
     2401#endif // USE(JSVALUE64)
    13732402
    13742403/* ------------------------------ END: OP_ADD, OP_SUB, OP_MUL ------------------------------ */
    13752404
     2405#endif // USE(JSVALUE32_64)
     2406
    13762407} // namespace JSC
    13772408
  • trunk/JavaScriptCore/jit/JITCall.cpp

    r44889 r46598  
    4646namespace JSC {
    4747
     48#if USE(JSVALUE32_64)
     49
     50void JIT::compileOpCallInitializeCallFrame()
     51{
     52    // regT0 holds callee, regT1 holds argCount
     53    store32(regT1, Address(callFrameRegister, RegisterFile::ArgumentCount * static_cast<int>(sizeof(Register))));
     54
     55    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // scopeChain
     56
     57    emitStore(static_cast<unsigned>(RegisterFile::OptionalCalleeArguments), JSValue());
     58    storePtr(regT0, Address(callFrameRegister, RegisterFile::Callee * static_cast<int>(sizeof(Register)))); // callee
     59    storePtr(regT1, Address(callFrameRegister, RegisterFile::ScopeChain * static_cast<int>(sizeof(Register)))); // scopeChain
     60}
     61
     62void JIT::compileOpCallSetupArgs(Instruction* instruction)
     63{
     64    int argCount = instruction[3].u.operand;
     65    int registerOffset = instruction[4].u.operand;
     66
     67    emitPutJITStubArg(regT0, 1);
     68    emitPutJITStubArg(regT1, 2);
     69    emitPutJITStubArgConstant(registerOffset, 3);
     70    emitPutJITStubArgConstant(argCount, 5);
     71}
     72         
     73void JIT::compileOpConstructSetupArgs(Instruction* instruction)
     74{
     75    int argCount = instruction[3].u.operand;
     76    int registerOffset = instruction[4].u.operand;
     77    int proto = instruction[5].u.operand;
     78    int thisRegister = instruction[6].u.operand;
     79
     80    emitPutJITStubArg(regT0, 1);
     81    emitPutJITStubArg(regT1, 2);
     82    emitPutJITStubArgConstant(registerOffset, 3);
     83    emitPutJITStubArgConstant(argCount, 5);
     84    emitPutJITStubArgFromVirtualRegister(proto, 7, regT2, regT3);
     85    emitPutJITStubArgConstant(thisRegister, 9);
     86}
     87
     88void JIT::compileOpCallVarargsSetupArgs(Instruction*)
     89{
     90    emitPutJITStubArg(regT0, 1);
     91    emitPutJITStubArg(regT1, 2);
     92    emitPutJITStubArg(regT3, 3); // registerOffset
     93    emitPutJITStubArg(regT2, 5); // argCount
     94}
     95
     96void JIT::compileOpCallVarargs(Instruction* instruction)
     97{
     98    int dst = instruction[1].u.operand;
     99    int callee = instruction[2].u.operand;
     100    int argCountRegister = instruction[3].u.operand;
     101    int registerOffset = instruction[4].u.operand;
     102
     103    emitLoad(callee, regT1, regT0);
     104    emitLoadPayload(argCountRegister, regT2); // argCount
     105    addPtr(Imm32(registerOffset), regT2, regT3); // registerOffset
     106
     107    compileOpCallVarargsSetupArgs(instruction);
     108
     109    emitJumpSlowCaseIfNotJSCell(callee, regT1);
     110    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));
     111
     112    // Speculatively roll the callframe, assuming argCount will match the arity.
     113    mul32(Imm32(sizeof(Register)), regT3, regT3);
     114    addPtr(callFrameRegister, regT3);
     115    storePtr(callFrameRegister, Address(regT3, RegisterFile::CallerFrame * static_cast<int>(sizeof(Register))));
     116    move(regT3, callFrameRegister);
     117
     118    move(regT2, regT1); // argCount
     119
     120    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());
     121
     122    emitStore(dst, regT1, regT0);
     123   
     124    sampleCodeBlock(m_codeBlock);
     125}
     126
     127void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
     128{
     129    int dst = instruction[1].u.operand;
     130    int callee = instruction[2].u.operand;
     131
     132    linkSlowCaseIfNotJSCell(iter, callee);
     133    linkSlowCase(iter);
     134
     135    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
     136    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.
     137
     138    map(m_bytecodeIndex + OPCODE_LENGTH(op_call_varargs), dst, regT1, regT0);
     139    sampleCodeBlock(m_codeBlock);
     140}
     141
     142void JIT::emit_op_ret(Instruction* currentInstruction)
     143{
     144    unsigned dst = currentInstruction[1].u.operand;
     145
     146    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
     147    if (m_codeBlock->needsFullScopeChain())
     148        JITStubCall(this, cti_op_ret_scopeChain).call();
     149
     150    emitLoad(dst, regT1, regT0);
     151    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
     152    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
     153
     154    restoreReturnAddressBeforeReturn(regT2);
     155    ret();
     156}
     157
     158void JIT::emit_op_construct_verify(Instruction* currentInstruction)
     159{
     160    unsigned dst = currentInstruction[1].u.operand;
     161
     162    emitLoad(dst, regT1, regT0);
     163    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
     164    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
     165    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
     166}
     167
     168void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     169{
     170    unsigned dst = currentInstruction[1].u.operand;
     171    unsigned src = currentInstruction[2].u.operand;
     172
     173    linkSlowCase(iter);
     174    linkSlowCase(iter);
     175    emitLoad(src, regT1, regT0);
     176    emitStore(dst, regT1, regT0);
     177}
     178
     179void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     180{
     181    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
     182}
     183
     184void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     185{
     186    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
     187}
     188
     189void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     190{
     191    compileOpCallVarargsSlowCase(currentInstruction, iter);
     192}
     193
     194void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     195{
     196    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
     197}
     198
     199void JIT::emit_op_call(Instruction* currentInstruction)
     200{
     201    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
     202}
     203
     204void JIT::emit_op_call_eval(Instruction* currentInstruction)
     205{
     206    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
     207}
     208
     209void JIT::emit_op_load_varargs(Instruction* currentInstruction)
     210{
     211    int argCountDst = currentInstruction[1].u.operand;
     212    int argsOffset = currentInstruction[2].u.operand;
     213
     214    JITStubCall stubCall(this, cti_op_load_varargs);
     215    stubCall.addArgument(Imm32(argsOffset));
     216    stubCall.call();
     217    // Stores a naked int32 in the register file.
     218    store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
     219}
     220
     221void JIT::emit_op_call_varargs(Instruction* currentInstruction)
     222{
     223    compileOpCallVarargs(currentInstruction);
     224}
     225
     226void JIT::emit_op_construct(Instruction* currentInstruction)
     227{
     228    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
     229}
     230
     231#if !ENABLE(JIT_OPTIMIZE_CALL)
     232
     233/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
     234
     235void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
     236{
     237    int dst = instruction[1].u.operand;
     238    int callee = instruction[2].u.operand;
     239    int argCount = instruction[3].u.operand;
     240    int registerOffset = instruction[4].u.operand;
     241
     242    Jump wasEval1;
     243    Jump wasEval2;
     244    if (opcodeID == op_call_eval) {
     245        JITStubCall stubCall(this, cti_op_call_eval);
     246        stubCall.addArgument(callee);
     247        stubCall.addArgument(JIT::Imm32(registerOffset));
     248        stubCall.addArgument(JIT::Imm32(argCount));
     249        stubCall.call();
     250        wasEval1 = branchTest32(NonZero, regT0);
     251        wasEval2 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
     252    }
     253
     254    emitLoad(callee, regT1, regT2);
     255
     256    if (opcodeID == op_call)
     257        compileOpCallSetupArgs(instruction);
     258    else if (opcodeID == op_construct)
     259        compileOpConstructSetupArgs(instruction);
     260
     261    emitJumpSlowCaseIfNotJSCell(callee, regT1);
     262    addSlowCase(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsFunctionVPtr)));
     263
     264    // First, in the case of a construct, allocate the new object.
     265    if (opcodeID == op_construct) {
     266        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
     267        emitLoad(callee, regT1, regT2);
     268    }
     269
     270    // Speculatively roll the callframe, assuming argCount will match the arity.
     271    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
     272    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
     273    move(Imm32(argCount), regT1);
     274
     275    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());
     276
     277    if (opcodeID == op_call_eval) {
     278        wasEval1.link(this);
     279        wasEval2.link(this);
     280    }
     281
     282    emitStore(dst, regT1, regT0);;
     283
     284    sampleCodeBlock(m_codeBlock);
     285}
     286
     287void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
     288{
     289    int dst = instruction[1].u.operand;
     290    int callee = instruction[2].u.operand;
     291
     292    linkSlowCaseIfNotJSCell(iter, callee);
     293    linkSlowCase(iter);
     294
     295    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
     296    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.
     297
     298    sampleCodeBlock(m_codeBlock);
     299}
     300
     301#else // !ENABLE(JIT_OPTIMIZE_CALL)
     302
     303/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
     304
     305void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
     306{
     307    int dst = instruction[1].u.operand;
     308    int callee = instruction[2].u.operand;
     309    int argCount = instruction[3].u.operand;
     310    int registerOffset = instruction[4].u.operand;
     311
     312    Jump wasEval1;
     313    Jump wasEval2;
     314    if (opcodeID == op_call_eval) {
     315        JITStubCall stubCall(this, cti_op_call_eval);
     316        stubCall.addArgument(callee);
     317        stubCall.addArgument(JIT::Imm32(registerOffset));
     318        stubCall.addArgument(JIT::Imm32(argCount));
     319        stubCall.call();
     320        wasEval1 = branchTest32(NonZero, regT0);
     321        wasEval2 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
     322    }
     323
     324    emitLoad(callee, regT1, regT0);
     325
     326    DataLabelPtr addressOfLinkedFunctionCheck;
     327    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, ImmPtr(0));
     328    addSlowCase(jumpToSlow);
     329    ASSERT(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow) == patchOffsetOpCallCompareToJump);
     330    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
     331
     332    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
     333
     334    // The following is the fast case, only used whan a callee can be linked.
     335
     336    // In the case of OpConstruct, call out to a cti_ function to create the new object.
     337    if (opcodeID == op_construct) {
     338        int proto = instruction[5].u.operand;
     339        int thisRegister = instruction[6].u.operand;
     340
     341        JITStubCall stubCall(this, cti_op_construct_JSConstruct);
     342        stubCall.addArgument(regT1, regT0);
     343        stubCall.addArgument(Imm32(0)); // FIXME: Remove this unused JITStub argument.
     344        stubCall.addArgument(Imm32(0)); // FIXME: Remove this unused JITStub argument.
     345        stubCall.addArgument(proto);
     346        stubCall.call(thisRegister);
     347
     348        emitLoad(callee, regT1, regT0);
     349    }
     350
     351    // Fast version of stack frame initialization, directly relative to edi.
     352    // Note that this omits to set up RegisterFile::CodeBlock, which is set in the callee
     353    emitStore(registerOffset + RegisterFile::OptionalCalleeArguments, JSValue());
     354    emitStore(registerOffset + RegisterFile::Callee, regT1, regT0);
     355
     356    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain
     357    store32(Imm32(argCount), Address(callFrameRegister, (registerOffset + RegisterFile::ArgumentCount) * static_cast<int>(sizeof(Register))));
     358    storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
     359    storePtr(regT1, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
     360    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);
     361
     362    // Call to the callee
     363    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();
     364   
     365    if (opcodeID == op_call_eval) {
     366        wasEval1.link(this);
     367        wasEval2.link(this);
     368    }
     369
     370    // Put the return value in dst. In the interpreter, op_ret does this.
     371    emitStore(dst, regT1, regT0);
     372    map(m_bytecodeIndex + opcodeLengths[opcodeID], dst, regT1, regT0);
     373
     374    sampleCodeBlock(m_codeBlock);
     375}
     376
     377void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
     378{
     379    int dst = instruction[1].u.operand;
     380    int callee = instruction[2].u.operand;
     381    int argCount = instruction[3].u.operand;
     382    int registerOffset = instruction[4].u.operand;
     383
     384    linkSlowCase(iter);
     385    linkSlowCase(iter);
     386
     387    // The arguments have been set up on the hot path for op_call_eval
     388    if (opcodeID == op_call)
     389        compileOpCallSetupArgs(instruction);
     390    else if (opcodeID == op_construct)
     391        compileOpConstructSetupArgs(instruction);
     392
     393    // Fast check for JS function.
     394    Jump callLinkFailNotObject = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
     395    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr));
     396
     397    // First, in the case of a construct, allocate the new object.
     398    if (opcodeID == op_construct) {
     399        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
     400        emitLoad(callee, regT1, regT0);
     401    }
     402
     403    // Speculatively roll the callframe, assuming argCount will match the arity.
     404    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
     405    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
     406    move(Imm32(argCount), regT1);
     407
     408    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_globalData->jitStubs.ctiVirtualCallPreLink());
     409
     410    // Put the return value in dst.
     411    emitStore(dst, regT1, regT0);;
     412    sampleCodeBlock(m_codeBlock);
     413
     414    // If not, we need an extra case in the if below!
     415    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));
     416
     417    // Done! - return back to the hot path.
     418    if (opcodeID == op_construct)
     419        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_construct));
     420    else
     421        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));
     422
     423    // This handles host functions
     424    callLinkFailNotObject.link(this);
     425    callLinkFailNotJSFunction.link(this);
     426    JITStubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction).call();
     427
     428    emitStore(dst, regT1, regT0);;
     429    sampleCodeBlock(m_codeBlock);
     430}
     431
     432/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
     433
     434#endif // !ENABLE(JIT_OPTIMIZE_CALL)
     435
     436#else // USE(JSVALUE32_64)
     437
    48438void JIT::compileOpCallInitializeCallFrame()
    49439{
     
    129519    linkSlowCase(iter);
    130520    linkSlowCase(iter);
    131     JITStubCall stubCall(this, JITStubs::cti_op_call_NotJSFunction);
     521    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    132522    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.
    133523   
     
    149539    Jump wasEval;
    150540    if (opcodeID == op_call_eval) {
    151         CallEvalJITStub(this, instruction).call();
     541        JITStubCall stubCall(this, cti_op_call_eval);
     542        stubCall.addArgument(callee, regT2);
     543        stubCall.addArgument(JIT::Imm32(registerOffset));
     544        stubCall.addArgument(JIT::Imm32(argCount));
     545        stubCall.call();
    152546        wasEval = branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue())));
    153547    }
     
    166560    // First, in the case of a construct, allocate the new object.
    167561    if (opcodeID == op_construct) {
    168         JITStubCall(this, JITStubs::cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
     562        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
    169563        emitGetVirtualRegister(callee, regT2);
    170564    }
     
    192586    linkSlowCase(iter);
    193587    linkSlowCase(iter);
    194     JITStubCall stubCall(this, opcodeID == op_construct ? JITStubs::cti_op_construct_NotJSConstruct : JITStubs::cti_op_call_NotJSFunction);
     588    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    195589    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.
    196590
     
    212606    Jump wasEval;
    213607    if (opcodeID == op_call_eval) {
    214         CallEvalJITStub(this, instruction).call();
     608        JITStubCall stubCall(this, cti_op_call_eval);
     609        stubCall.addArgument(callee, regT2);
     610        stubCall.addArgument(JIT::Imm32(registerOffset));
     611        stubCall.addArgument(JIT::Imm32(argCount));
     612        stubCall.call();
    215613        wasEval = branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue())));
    216614    }
     
    234632        emitPutJITStubArg(regT2, 1);
    235633        emitPutJITStubArgFromVirtualRegister(proto, 4, regT0);
    236         JITStubCall stubCall(this, JITStubs::cti_op_construct_JSConstruct);
     634        JITStubCall stubCall(this, cti_op_construct_JSConstruct);
    237635        stubCall.call(thisRegister);
    238636        emitGetVirtualRegister(callee, regT2);
     
    282680    // First, in the case of a construct, allocate the new object.
    283681    if (opcodeID == op_construct) {
    284         JITStubCall(this, JITStubs::cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
     682        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
    285683        emitGetVirtualRegister(callee, regT2);
    286684    }
     
    309707    callLinkFailNotObject.link(this);
    310708    callLinkFailNotJSFunction.link(this);
    311     JITStubCall(this, opcodeID == op_construct ? JITStubs::cti_op_construct_NotJSConstruct : JITStubs::cti_op_call_NotJSFunction).call();
     709    JITStubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction).call();
    312710
    313711    emitPutVirtualRegister(dst);
     
    319717#endif // !ENABLE(JIT_OPTIMIZE_CALL)
    320718
     719#endif // USE(JSVALUE32_64)
     720
    321721} // namespace JSC
    322722
  • trunk/JavaScriptCore/jit/JITInlineMethods.h

    r46438 r46598  
    3333namespace JSC {
    3434
     35/* Deprecated: Please use JITStubCall instead. */
     36
     37// puts an arg onto the stack, as an arg to a context threaded function.
     38ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID src, unsigned argumentNumber)
     39{
     40    poke(src, argumentNumber);
     41}
     42
     43/* Deprecated: Please use JITStubCall instead. */
     44
     45ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber)
     46{
     47    poke(Imm32(value), argumentNumber);
     48}
     49
     50/* Deprecated: Please use JITStubCall instead. */
     51
     52ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(void* value, unsigned argumentNumber)
     53{
     54    poke(ImmPtr(value), argumentNumber);
     55}
     56
     57/* Deprecated: Please use JITStubCall instead. */
     58
     59ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
     60{
     61    peek(dst, argumentNumber);
     62}
     63
     64ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
     65{
     66    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
     67    return m_codeBlock->getConstant(src);
     68}
     69
     70ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
     71{
     72    storePtr(from, Address(callFrameRegister, entry * sizeof(Register)));
     73}
     74
     75ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
     76{
     77    storePtr(ImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
     78}
     79
     80ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
     81{
     82    loadPtr(Address(from, entry * sizeof(Register)), to);
     83#if !USE(JSVALUE32_64)
     84    killLastResultRegister();
     85#endif
     86}
     87
     88ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
     89{
     90    load32(Address(from, entry * sizeof(Register)), to);
     91#if !USE(JSVALUE32_64)
     92    killLastResultRegister();
     93#endif
     94}
     95
     96ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
     97{
     98    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     99
     100    Call nakedCall = nearCall();
     101    m_calls.append(CallRecord(nakedCall, m_bytecodeIndex, function.executableAddress()));
     102    return nakedCall;
     103}
     104
     105#if PLATFORM(X86) || PLATFORM(X86_64)
     106
     107ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
     108{
     109    pop(reg);
     110}
     111
     112ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
     113{
     114    push(reg);
     115}
     116
     117ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
     118{
     119    push(address);
     120}
     121
     122#elif PLATFORM_ARM_ARCH(7)
     123
     124ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
     125{
     126    move(linkRegister, reg);
     127}
     128
     129ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
     130{
     131    move(reg, linkRegister);
     132}
     133
     134ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
     135{
     136    loadPtr(address, linkRegister);
     137}
     138
     139#endif
     140
     141#if USE(JIT_STUB_ARGUMENT_VA_LIST)
     142ALWAYS_INLINE void JIT::restoreArgumentReference()
     143{
     144    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
     145}
     146ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline() {}
     147#else
     148ALWAYS_INLINE void JIT::restoreArgumentReference()
     149{
     150    move(stackPointerRegister, firstArgumentRegister);
     151    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
     152}
     153ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
     154{
     155#if PLATFORM(X86)
     156    // Within a trampoline the return address will be on the stack at this point.
     157    addPtr(Imm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
     158#elif PLATFORM_ARM_ARCH(7)
     159    move(stackPointerRegister, firstArgumentRegister);
     160#endif
     161    // In the trampoline on x86-64, the first argument register is not overwritten.
     162}
     163#endif
     164
     165ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
     166{
     167    return branchPtr(NotEqual, Address(reg, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(structure));
     168}
     169
     170ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
     171{
     172    if (!m_codeBlock->isKnownNotImmediate(vReg))
     173        linkSlowCase(iter);
     174}
     175
     176ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
     177{
     178    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     179
     180    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeIndex));
     181}
     182
     183ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
     184{
     185    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     186
     187    const JumpList::JumpVector& jumpVector = jumpList.jumps();
     188    size_t size = jumpVector.size();
     189    for (size_t i = 0; i < size; ++i)
     190        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeIndex));
     191}
     192
     193ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
     194{
     195    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     196
     197    m_jmpTable.append(JumpTable(jump, m_bytecodeIndex + relativeOffset));
     198}
     199
     200ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
     201{
     202    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     203
     204    jump.linkTo(m_labels[m_bytecodeIndex + relativeOffset], this);
     205}
     206
     207#if ENABLE(SAMPLING_FLAGS)
     208ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
     209{
     210    ASSERT(flag >= 1);
     211    ASSERT(flag <= 32);
     212    or32(Imm32(1u << (flag - 1)), AbsoluteAddress(&SamplingFlags::s_flags));
     213}
     214
     215ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
     216{
     217    ASSERT(flag >= 1);
     218    ASSERT(flag <= 32);
     219    and32(Imm32(~(1u << (flag - 1))), AbsoluteAddress(&SamplingFlags::s_flags));
     220}
     221#endif
     222
     223#if ENABLE(SAMPLING_COUNTERS)
     224ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
     225{
     226#if PLATFORM(X86_64) // Or any other 64-bit plattform.
     227    addPtr(Imm32(count), AbsoluteAddress(&counter.m_counter));
     228#elif PLATFORM(X86) // Or any other little-endian 32-bit plattform.
     229    intptr_t hiWord = reinterpret_cast<intptr_t>(&counter.m_counter) + sizeof(int32_t);
     230    add32(Imm32(count), AbsoluteAddress(&counter.m_counter));
     231    addWithCarry32(Imm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
     232#else
     233#error "SAMPLING_FLAGS not implemented on this platform."
     234#endif
     235}
     236#endif
     237
     238#if ENABLE(OPCODE_SAMPLING)
     239#if PLATFORM(X86_64)
     240ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction) // Emit code that records the currently-executing instruction in the sampler's slot.
     241{
     242    move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86::ecx); // 64-bit: absolute address must be materialized in a register first; ecx used as scratch.
     243    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86::ecx);
     244}
     245#else
     246ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction) // 32-bit variant: the slot address fits in a store's absolute addressing mode.
     247{
     248    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
     249}
     250#endif
     251#endif
     252
     253#if ENABLE(CODEBLOCK_SAMPLING)
     254#if PLATFORM(X86_64)
     255ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock) // Emit code that records the currently-executing CodeBlock in the sampler's slot.
     256{
     257    move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86::ecx); // 64-bit: materialize the absolute slot address in ecx before storing.
     258    storePtr(ImmPtr(codeBlock), X86::ecx);
     259}
     260#else
     261ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock) // 32-bit variant: store straight to the slot's absolute address.
     262{
     263    storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
     264}
     265#endif
     266#endif
     267
     268#if USE(JSVALUE32_64)
     269
     270inline JIT::Address JIT::tagFor(unsigned index, RegisterID base) // Address of virtual register `index`'s tag word, relative to `base` (a Register-sized slot array).
     271{
     272    return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
     273}
     274
     275inline JIT::Address JIT::payloadFor(unsigned index, RegisterID base) // Address of virtual register `index`'s payload word, relative to `base`.
     276{
     277    return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
     278}
     279
     280inline JIT::Address JIT::addressFor(unsigned index, RegisterID base) // Address of the whole (tag+payload) slot for virtual register `index`, e.g. for double loads/stores.
     281{
     282    return Address(base, (index * sizeof(Register)));
     283}
     284
     285inline void JIT::emitLoadTag(unsigned index, RegisterID tag) // Emit code placing virtual register `index`'s tag into `tag`, using the map cache or constant pool when possible.
     286{
     287    RegisterID mappedTag;
     288    if (getMappedTag(index, mappedTag)) { // Fast case: tag already cached in a register by map().
     289        move(mappedTag, tag);
     290        unmap(tag); // `tag` is being overwritten, so drop any mapping it held.
     291        return;
     292    }
     293
     294    if (m_codeBlock->isConstantRegisterIndex(index)) { // Constant: bake the tag in as an immediate, no memory load.
     295        move(Imm32(getConstantOperand(index).tag()), tag);
     296        unmap(tag);
     297        return;
     298    }
     299
     300    load32(tagFor(index), tag); // Slow case: load the tag word from the call frame.
     301    unmap(tag);
     302}
     303
     304inline void JIT::emitLoadPayload(unsigned index, RegisterID payload) // Emit code placing virtual register `index`'s payload into `payload`; mirrors emitLoadTag.
     305{
     306    RegisterID mappedPayload;
     307    if (getMappedPayload(index, mappedPayload)) { // Fast case: payload already cached in a register by map().
     308        move(mappedPayload, payload);
     309        unmap(payload); // `payload` is being overwritten, so drop any mapping it held.
     310        return;
     311    }
     312
     313    if (m_codeBlock->isConstantRegisterIndex(index)) { // Constant: bake the payload in as an immediate.
     314        move(Imm32(getConstantOperand(index).payload()), payload);
     315        unmap(payload);
     316        return;
     317    }
     318
     319    load32(payloadFor(index), payload); // Slow case: load the payload word from the call frame.
     320    unmap(payload);
     321}
     322
     323inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload) // Emit code materializing the known constant `v` into a tag/payload register pair.
     324{
     325    move(Imm32(v.payload()), payload);
     326    move(Imm32(v.tag()), tag);
     327}
     328
     329inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base) // Emit code loading virtual register `index` (both words) from slots based at `base`.
     330{
     331    ASSERT(tag != payload); // The two destination registers must be distinct.
     332
     333    if (base == callFrameRegister) { // Call-frame loads can use the mapping/constant fast paths.
     334        ASSERT(payload != base); // Loading payload first must not clobber the base.
     335        emitLoadPayload(index, payload);
     336        emitLoadTag(index, tag);
     337        return;
     338    }
     339
     340    if (payload == base) { // avoid stomping base
     341        load32(tagFor(index, base), tag); // Load tag first so the payload load (which overwrites base) comes last.
     342        load32(payloadFor(index, base), payload);
     343        return;
     344    }
     345
     346    load32(payloadFor(index, base), payload);
     347    load32(tagFor(index, base), tag);
     348}
     349
     350inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2) // Load two virtual registers, ordering the loads so a register mapping is consumed before it can be invalidated.
     351{
     352    if (isMapped(index1)) { // Load the mapped operand first: loading index2 would unmap it.
     353        emitLoad(index1, tag1, payload1);
     354        emitLoad(index2, tag2, payload2);
     355        return;
     356    }
     357    emitLoad(index2, tag2, payload2); // Otherwise load index2 first, in case *it* is the mapped one.
     358    emitLoad(index1, tag1, payload1);
     359}
     360
     361inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value) // Emit code loading virtual register `index` as a double into FP register `value`.
     362{
     363    if (m_codeBlock->isConstantRegisterIndex(index)) { // Constants live in the CodeBlock's constant pool, not the call frame.
     364        Register& inConstantPool = m_codeBlock->constantRegister(index);
     365        loadDouble(&inConstantPool, value); // Load from the pool slot's absolute address.
     366    } else
     367        loadDouble(addressFor(index), value); // Load the full 64-bit slot from the call frame.
     368}
     369
     370inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value) // Emit code converting virtual register `index`'s int32 payload to a double in `value`.
     371{
     372    if (m_codeBlock->isConstantRegisterIndex(index)) {
     373        Register& inConstantPool = m_codeBlock->constantRegister(index);
     374        char* bytePointer = reinterpret_cast<char*>(&inConstantPool); // Byte arithmetic to address only the payload word within the pool slot.
     375        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
     376    } else
     377        convertInt32ToDouble(payloadFor(index), value); // Convert straight from the call-frame payload word.
     378}
     379
     380inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base) // Emit code storing a tag/payload register pair to virtual register `index` based at `base`.
     381{
     382    store32(payload, payloadFor(index, base));
     383    store32(tag, tagFor(index, base));
     384}
     385
     386inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32) // Store an int32 result; the tag write is skipped when the slot is already known to hold Int32Tag.
     387{
     388    store32(payload, payloadFor(index, callFrameRegister));
     389    if (!indexIsInt32)
     390        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
     391}
     392
     393inline void JIT::emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32) // Immediate-payload overload of the above.
     394{
     395    store32(payload, payloadFor(index, callFrameRegister));
     396    if (!indexIsInt32)
     397        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
     398}
     399
     400inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell) // Store a cell pointer; tag write elided when the slot is already known to be a cell.
     401{
     402    store32(payload, payloadFor(index, callFrameRegister));
     403    if (!indexIsCell)
     404        store32(Imm32(JSValue::CellTag), tagFor(index, callFrameRegister));
     405}
     406
     407inline void JIT::emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool) // Store a boolean: `tag` carries the tag word; payload is zeroed unless the slot is already a bool.
     408{
     409    if (!indexIsBool)
     410        store32(Imm32(0), payloadFor(index, callFrameRegister));
     411    store32(tag, tagFor(index, callFrameRegister));
     412}
     413
     414inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value) // Store a double result into the full 64-bit call-frame slot in one write.
     415{
     416    storeDouble(value, addressFor(index));
     417}
     418
     419inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base) // Store a known constant value as two immediate 32-bit writes.
     420{
     421    store32(Imm32(constant.payload()), payloadFor(index, base));
     422    store32(Imm32(constant.tag()), tagFor(index, base));
     423}
     424
     425ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst) // Emit code initializing virtual register `dst` to the undefined value.
     426{
     427    emitStore(dst, jsUndefined());
     428}
     429
     430inline bool JIT::isLabeled(unsigned bytecodeIndex) // True if `bytecodeIndex` is a jump target. Advances m_jumpTargetIndex, so it must be queried with monotonically increasing indices.
     431{
     432    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) { // Jump targets are assumed sorted ascending — TODO confirm.
     433        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
     434        if (jumpTarget == bytecodeIndex)
     435            return true;
     436        if (jumpTarget > bytecodeIndex) // Past the query point: stop without consuming this target.
     437            return false;
     438    }
     439    return false;
     440}
     441
     442inline void JIT::map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload) // Record that `virtualRegisterIndex`'s tag/payload currently live in the given registers, valid only while compiling `bytecodeIndex`.
     443{
     444    if (isLabeled(bytecodeIndex)) // A jump target can be entered from elsewhere, so register contents can't be assumed — don't cache.
     445        return;
     446
     447    m_mappedBytecodeIndex = bytecodeIndex;
     448    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
     449    m_mappedTag = tag;
     450    m_mappedPayload = payload;
     451}
     452
     453inline void JIT::unmap(RegisterID registerID) // Invalidate whichever half of the mapping (tag or payload) uses `registerID`.
     454{
     455    if (m_mappedTag == registerID)
     456        m_mappedTag = (RegisterID)-1; // -1 acts as the "no register" sentinel.
     457    else if (m_mappedPayload == registerID)
     458        m_mappedPayload = (RegisterID)-1;
     459}
     460
     461inline void JIT::unmap() // Invalidate the entire register mapping.
     462{
     463    m_mappedBytecodeIndex = (unsigned)-1;
     464    m_mappedVirtualRegisterIndex = (unsigned)-1;
     465    m_mappedTag = (RegisterID)-1;
     466    m_mappedPayload = (RegisterID)-1;
     467}
     468
     469inline bool JIT::isMapped(unsigned virtualRegisterIndex) // True if the mapping cache holds this virtual register for the bytecode currently being compiled.
     470{
     471    if (m_mappedBytecodeIndex != m_bytecodeIndex) // Mapping is only valid within the instruction that created it.
     472        return false;
     473    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
     474        return false;
     475    return true;
     476}
     477
     478inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload) // Like isMapped(), but also requires a live payload register; returns it via `payload`.
     479{
     480    if (m_mappedBytecodeIndex != m_bytecodeIndex)
     481        return false;
     482    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
     483        return false;
     484    if (m_mappedPayload == (RegisterID)-1) // Payload half may have been unmapped independently.
     485        return false;
     486    payload = m_mappedPayload;
     487    return true;
     488}
     489
     490inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag) // Like isMapped(), but also requires a live tag register; returns it via `tag`.
     491{
     492    if (m_mappedBytecodeIndex != m_bytecodeIndex)
     493        return false;
     494    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
     495        return false;
     496    if (m_mappedTag == (RegisterID)-1) // Tag half may have been unmapped independently.
     497        return false;
     498    tag = m_mappedTag;
     499    return true;
     500}
     501
     502inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex) // Emit a slow-case branch if the register's in-memory tag is not CellTag; elided if statically known to be a cell.
     503{
     504    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
     505        addSlowCase(branch32(NotEqual, tagFor(virtualRegisterIndex), Imm32(JSValue::CellTag)));
     506}
     507
     508inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag) // Variant testing an already-loaded tag register instead of memory.
     509{
     510    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
     511        addSlowCase(branch32(NotEqual, tag, Imm32(JSValue::CellTag)));
     512}
     513
     514inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex) // Consume the matching slow-case entry — must mirror the emit side's isKnownNotImmediate condition exactly.
     515{
     516    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
     517        linkSlowCase(iter);
     518}
     519
     520ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src) // True if operand `src` is a compile-time int32 constant.
     521{
     522    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
     523}
     524
     525ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant) // If either operand is an int32 constant, return it in `constant` and the *other* operand in `op`.
     526{
     527    if (isOperandConstantImmediateInt(op1)) {
     528        constant = getConstantOperand(op1).asInt32();
     529        op = op2; // `op` receives the non-constant operand.
     530        return true;
     531    }
     532
     533    if (isOperandConstantImmediateInt(op2)) {
     534        constant = getConstantOperand(op2).asInt32();
     535        op = op1;
     536        return true;
     537    }
     538   
     539    return false; // Neither operand is a constant int.
     540}
     541
     542ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src) // True if operand `src` is a compile-time double constant.
     543{
     544    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
     545}
     546
     547/* Deprecated: Please use JITStubCall instead. */
     548
     549ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2) // Poke virtual register `src` onto the stub-call argument area: payload at argumentNumber, tag at argumentNumber + 1.
     550{
     551    if (m_codeBlock->isConstantRegisterIndex(src)) { // Constants can be poked as immediates without touching scratch registers.
     552        JSValue constant = m_codeBlock->getConstant(src);
     553        poke(Imm32(constant.payload()), argumentNumber);
     554        poke(Imm32(constant.tag()), argumentNumber + 1);
     555    } else {
     556        emitLoad(src, scratch1, scratch2); // scratch1 = tag, scratch2 = payload (emitLoad's tag/payload order).
     557        poke(scratch2, argumentNumber);
     558        poke(scratch1, argumentNumber + 1);
     559    }
     560}
     561
     562#else // USE(JSVALUE32_64)
     563
    35564ALWAYS_INLINE void JIT::killLastResultRegister()
    36565{
     
    83612}
    84613
    85 // puts an arg onto the stack, as an arg to a context threaded function.
    86 ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID src, unsigned argumentNumber)
    87 {
    88     poke(src, argumentNumber);
    89 }
    90 
    91 ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber)
    92 {
    93     poke(Imm32(value), argumentNumber);
    94 }
    95 
    96 ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(void* value, unsigned argumentNumber)
    97 {
    98     poke(ImmPtr(value), argumentNumber);
    99 }
    100 
    101 ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
    102 {
    103     peek(dst, argumentNumber);
    104 }
    105 
    106 ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
    107 {
    108     ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    109     return m_codeBlock->getConstant(src);
    110 }
    111 
    112614ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
    113615{
    114     return getConstantOperand(src).getInt32Fast();
     616    return getConstantOperand(src).asInt32();
    115617}
    116618
    117619ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
    118620{
    119     return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32Fast();
    120 }
     621    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
     622}
     623
     624ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from) // Store a whole encoded JSValue register to virtual register `dst` (single-word value representation).
     625{
     626    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
     627    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? dst : std::numeric_limits<int>::max(); // Track which virtual register the cached result register now mirrors; max() means "none".
     628}
     629
     630ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst) // Initialize virtual register `dst` to encoded undefined with a single pointer-sized store.
     631{
     632    storePtr(ImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
     633}
     634
     635ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg) // Branch taken when `reg` holds a cell pointer (no immediate tag bits set).
     636{
     637#if USE(JSVALUE64)
     638    return branchTestPtr(Zero, reg, tagMaskRegister); // 64-bit: tag mask lives in a pinned register.
     639#else
     640    return branchTest32(Zero, reg, Imm32(JSImmediate::TagMask)); // 32-bit: tag mask is a compile-time immediate.
     641#endif
     642}
     643
     644ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch) // Branch taken when both registers are cells; ORing the tags lets one test cover both. Clobbers `scratch`.
     645{
     646    move(reg1, scratch);
     647    orPtr(reg2, scratch);
     648    return emitJumpIfJSCell(scratch);
     649}
     650
     651ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg) // Route to the slow path when `reg` is a cell.
     652{
     653    addSlowCase(emitJumpIfJSCell(reg));
     654}
     655
     656ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg) // Branch taken when `reg` is an immediate (any tag bit set).
     657{
     658#if USE(JSVALUE64)
     659    return branchTestPtr(NonZero, reg, tagMaskRegister);
     660#else
     661    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagMask));
     662#endif
     663}
     664
     665ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg) // Route to the slow path when `reg` is not a cell.
     666{
     667    addSlowCase(emitJumpIfNotJSCell(reg));
     668}
     669
     670ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg) // As above, but elided when static analysis proves `vReg` is always a cell.
     671{
     672    if (!m_codeBlock->isKnownNotImmediate(vReg))
     673        emitJumpSlowCaseIfNotJSCell(reg);
     674}
     675
     676#if USE(JSVALUE64)
     677ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateNumber(RegisterID reg) // Branch taken when `reg` holds an immediate number (any TagTypeNumber bit set). 64-bit only.
     678{
     679    return branchTestPtr(NonZero, reg, tagTypeNumberRegister);
     680}
     681ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateNumber(RegisterID reg) // Inverse of the above.
     682{
     683    return branchTestPtr(Zero, reg, tagTypeNumberRegister);
     684}
     685#endif
     686
     687ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg) // Branch taken when `reg` holds an immediate int32.
     688{
     689#if USE(JSVALUE64)
     690    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister); // 64-bit: ints occupy values >= the full TagTypeNumber pattern, so an unsigned compare suffices.
     691#else
     692    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagTypeNumber)); // 32-bit: test the integer tag bit.
     693#endif
     694}
     695
     696ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg) // Inverse of emitJumpIfImmediateInteger.
     697{
     698#if USE(JSVALUE64)
     699    return branchPtr(Below, reg, tagTypeNumberRegister);
     700#else
     701    return branchTest32(Zero, reg, Imm32(JSImmediate::TagTypeNumber));
     702#endif
     703}
     704
     705ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch) // Branch taken unless both registers are immediate ints; ANDing the tags lets one test cover both. Clobbers `scratch`.
     706{
     707    move(reg1, scratch);
     708    andPtr(reg2, scratch);
     709    return emitJumpIfNotImmediateInteger(scratch);
     710}
     711
     712ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg) // Route to the slow path unless `reg` is an immediate int.
     713{
     714    addSlowCase(emitJumpIfNotImmediateInteger(reg));
     715}
     716
     717ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch) // Route to the slow path unless both registers are immediate ints. Clobbers `scratch`.
     718{
     719        addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
     720}
     721
     722#if !USE(JSVALUE64)
     723ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg) // Strip the integer tag from an immediate int (32-bit immediate encoding only).
     724{
     725    subPtr(Imm32(JSImmediate::TagTypeNumber), reg);
     726}
     727
     728ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg) // Strip the tag and branch if the remaining value is zero.
     729{
     730    return branchSubPtr(Zero, Imm32(JSImmediate::TagTypeNumber), reg);
     731}
     732#endif
     733
     734ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest) // Re-apply the integer tag to a previously de-tagged value.
     735{
     736#if USE(JSVALUE64)
     737    emitFastArithIntToImmNoCheck(src, dest);
     738#else
     739    if (src != dest)
     740        move(src, dest);
     741    addPtr(Imm32(JSImmediate::TagTypeNumber), dest);
     742#endif
     743}
     744
     745ALWAYS_INLINE void JIT::emitFastArithImmToInt(RegisterID reg) // Convert an immediate int encoding to a raw int in place.
     746{
     747#if USE(JSVALUE64)
     748    UNUSED_PARAM(reg); // 64-bit: the low 32 bits already hold the raw int; nothing to emit.
     749#else
     750    rshiftPtr(Imm32(JSImmediate::IntegerPayloadShift), reg); // 32-bit: shift the payload down past the tag bit.
     751#endif
     752}
     753
     754// operand is int32_t, must have been zero-extended if register is 64-bit.
     755ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest) // Convert a raw int32 to an immediate JSValue encoding, assuming no overflow check is needed.
     756{
     757#if USE(JSVALUE64)
     758    if (src != dest)
     759        move(src, dest);
     760    orPtr(tagTypeNumberRegister, dest); // Tag by ORing in the TagTypeNumber pattern.
     761#else
     762    signExtend32ToPtr(src, dest);
     763    addPtr(dest, dest); // Shift left by one (make room for the tag bit) via self-add.
     764    emitFastArithReTagImmediate(dest, dest);
     765#endif
     766}
     767
     768ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg) // Convert a 0/1 value in `reg` into an immediate boolean encoding.
     769{
     770    lshift32(Imm32(JSImmediate::ExtendedPayloadShift), reg);
     771    or32(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), reg);
     772}
     773
     774/* Deprecated: Please use JITStubCall instead. */
    121775
    122776// get arg puts an arg from the SF register array onto the stack, as an arg to a context threaded function.
     
    134788}
    135789
    136 ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
    137 {
    138     storePtr(from, Address(callFrameRegister, entry * sizeof(Register)));
    139 }
    140 
    141 ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
    142 {
    143     storePtr(ImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
    144 }
    145 
    146 ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
    147 {
    148     loadPtr(Address(from, entry * sizeof(Register)), to);
    149     killLastResultRegister();
    150 }
    151 
    152 ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
    153 {
    154     load32(Address(from, entry * sizeof(Register)), to);
    155     killLastResultRegister();
    156 }
    157 
    158 ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
    159 {
    160     storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    161     m_lastResultBytecodeRegister = (from == cachedResultRegister) ? dst : std::numeric_limits<int>::max();
    162     // FIXME: #ifndef NDEBUG, Write the correct m_type to the register.
    163 }
    164 
    165 ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
    166 {
    167     storePtr(ImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
    168     // FIXME: #ifndef NDEBUG, Write the correct m_type to the register.
    169 }
    170 
    171 ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
    172 {
    173     ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
    174 
    175     Call nakedCall = nearCall();
    176     m_calls.append(CallRecord(nakedCall, m_bytecodeIndex, function.executableAddress()));
    177     return nakedCall;
    178 }
    179 
    180 #if PLATFORM(X86) || PLATFORM(X86_64)
    181 
    182 ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
    183 {
    184     pop(reg);
    185 }
    186 
    187 ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
    188 {
    189     push(reg);
    190 }
    191 
    192 ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
    193 {
    194     push(address);
    195 }
    196 
    197 #elif PLATFORM_ARM_ARCH(7)
    198 
    199 ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
    200 {
    201     move(linkRegister, reg);
    202 }
    203 
    204 ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
    205 {
    206     move(reg, linkRegister);
    207 }
    208 
    209 ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
    210 {
    211     loadPtr(address, linkRegister);
    212 }
    213 
    214 #endif
    215 
    216 #if USE(JIT_STUB_ARGUMENT_VA_LIST)
    217 ALWAYS_INLINE void JIT::restoreArgumentReference()
    218 {
    219     poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    220 }
    221 ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline() {}
    222 #else
    223 ALWAYS_INLINE void JIT::restoreArgumentReference()
    224 {
    225     move(stackPointerRegister, firstArgumentRegister);
    226     poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    227 }
    228 ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
    229 {
    230 #if PLATFORM(X86)
    231     // Within a trampoline the return address will be on the stack at this point.
    232     addPtr(Imm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
    233 #elif PLATFORM_ARM_ARCH(7)
    234     move(stackPointerRegister, firstArgumentRegister);
    235 #endif
    236     // In the trampoline on x86-64, the first argument register is not overwritten.
    237 }
    238 #endif
    239 
    240 ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
    241 {
    242     return branchPtr(NotEqual, Address(reg, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(structure));
    243 }
    244 
    245 ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
    246 {
    247 #if USE(ALTERNATE_JSIMMEDIATE)
    248     return branchTestPtr(Zero, reg, tagMaskRegister);
    249 #else
    250     return branchTest32(Zero, reg, Imm32(JSImmediate::TagMask));
    251 #endif
    252 }
    253 
    254 ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
    255 {
    256     move(reg1, scratch);
    257     orPtr(reg2, scratch);
    258     return emitJumpIfJSCell(scratch);
    259 }
    260 
    261 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
    262 {
    263     addSlowCase(emitJumpIfJSCell(reg));
    264 }
    265 
    266 ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
    267 {
    268 #if USE(ALTERNATE_JSIMMEDIATE)
    269     return branchTestPtr(NonZero, reg, tagMaskRegister);
    270 #else
    271     return branchTest32(NonZero, reg, Imm32(JSImmediate::TagMask));
    272 #endif
    273 }
    274 
    275 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
    276 {
    277     addSlowCase(emitJumpIfNotJSCell(reg));
    278 }
    279 
    280 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
    281 {
    282     if (!m_codeBlock->isKnownNotImmediate(vReg))
    283         emitJumpSlowCaseIfNotJSCell(reg);
    284 }
    285 
    286 ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
    287 {
    288     if (!m_codeBlock->isKnownNotImmediate(vReg))
    289         linkSlowCase(iter);
    290 }
    291 
    292 #if USE(ALTERNATE_JSIMMEDIATE)
    293 ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateNumber(RegisterID reg)
    294 {
    295     return branchTestPtr(NonZero, reg, tagTypeNumberRegister);
    296 }
    297 ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateNumber(RegisterID reg)
    298 {
    299     return branchTestPtr(Zero, reg, tagTypeNumberRegister);
    300 }
    301 #endif
    302 
    303 ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
    304 {
    305 #if USE(ALTERNATE_JSIMMEDIATE)
    306     return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
    307 #else
    308     return branchTest32(NonZero, reg, Imm32(JSImmediate::TagTypeNumber));
    309 #endif
    310 }
    311 
    312 ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
    313 {
    314 #if USE(ALTERNATE_JSIMMEDIATE)
    315     return branchPtr(Below, reg, tagTypeNumberRegister);
    316 #else
    317     return branchTest32(Zero, reg, Imm32(JSImmediate::TagTypeNumber));
    318 #endif
    319 }
    320 
    321 ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
    322 {
    323     move(reg1, scratch);
    324     andPtr(reg2, scratch);
    325     return emitJumpIfNotImmediateInteger(scratch);
    326 }
    327 
    328 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
    329 {
    330     addSlowCase(emitJumpIfNotImmediateInteger(reg));
    331 }
    332 
    333 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
    334 {
    335     addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
    336 }
    337 
    338 #if !USE(ALTERNATE_JSIMMEDIATE)
    339 ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
    340 {
    341     subPtr(Imm32(JSImmediate::TagTypeNumber), reg);
    342 }
    343 
    344 ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
    345 {
    346     return branchSubPtr(Zero, Imm32(JSImmediate::TagTypeNumber), reg);
    347 }
    348 #endif
    349 
    350 ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
    351 {
    352 #if USE(ALTERNATE_JSIMMEDIATE)
    353     emitFastArithIntToImmNoCheck(src, dest);
    354 #else
    355     if (src != dest)
    356         move(src, dest);
    357     addPtr(Imm32(JSImmediate::TagTypeNumber), dest);
    358 #endif
    359 }
    360 
    361 ALWAYS_INLINE void JIT::emitFastArithImmToInt(RegisterID reg)
    362 {
    363 #if USE(ALTERNATE_JSIMMEDIATE)
    364     UNUSED_PARAM(reg);
    365 #else
    366     rshiftPtr(Imm32(JSImmediate::IntegerPayloadShift), reg);
    367 #endif
    368 }
    369 
    370 // operand is int32_t, must have been zero-extended if register is 64-bit.
    371 ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
    372 {
    373 #if USE(ALTERNATE_JSIMMEDIATE)
    374     if (src != dest)
    375         move(src, dest);
    376     orPtr(tagTypeNumberRegister, dest);
    377 #else
    378     signExtend32ToPtr(src, dest);
    379     addPtr(dest, dest);
    380     emitFastArithReTagImmediate(dest, dest);
    381 #endif
    382 }
    383 
    384 ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
    385 {
    386     lshift32(Imm32(JSImmediate::ExtendedPayloadShift), reg);
    387     or32(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), reg);
    388 }
    389 
    390 ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
    391 {
    392     ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
    393 
    394     m_slowCases.append(SlowCaseEntry(jump, m_bytecodeIndex));
    395 }
    396 
    397 ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
    398 {
    399     ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
    400 
    401     m_jmpTable.append(JumpTable(jump, m_bytecodeIndex + relativeOffset));
    402 }
    403 
    404 ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
    405 {
    406     ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
    407 
    408     jump.linkTo(m_labels[m_bytecodeIndex + relativeOffset], this);
    409 }
    410 
    411 #if ENABLE(SAMPLING_FLAGS)
    412 ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
    413 {
    414     ASSERT(flag >= 1);
    415     ASSERT(flag <= 32);
    416     or32(Imm32(1u << (flag - 1)), AbsoluteAddress(&SamplingFlags::s_flags));
    417 }
    418 
    419 ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
    420 {
    421     ASSERT(flag >= 1);
    422     ASSERT(flag <= 32);
    423     and32(Imm32(~(1u << (flag - 1))), AbsoluteAddress(&SamplingFlags::s_flags));
    424 }
    425 #endif
    426 
    427 #if ENABLE(SAMPLING_COUNTERS)
    428 ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
    429 {
    430 #if PLATFORM(X86_64) // Or any other 64-bit platform.
    431     addPtr(Imm32(count), AbsoluteAddress(&counter.m_counter));
    432 #elif PLATFORM(X86) // Or any other little-endian 32-bit platform.
    433     intptr_t hiWord = reinterpret_cast<intptr_t>(&counter.m_counter) + sizeof(int32_t);
    434     add32(Imm32(count), AbsoluteAddress(&counter.m_counter));
    435     addWithCarry32(Imm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
    436 #else
    437 #error "SAMPLING_FLAGS not implemented on this platform."
    438 #endif
    439 }
    440 #endif
    441 
    442 #if ENABLE(OPCODE_SAMPLING)
    443 #if PLATFORM(X86_64)
    444 ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
    445 {
    446     move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86::ecx);
    447     storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86::ecx);
    448 }
    449 #else
    450 ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
    451 {
    452     storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
    453 }
    454 #endif
    455 #endif
    456 
    457 #if ENABLE(CODEBLOCK_SAMPLING)
    458 #if PLATFORM(X86_64)
    459 ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
    460 {
    461     move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86::ecx);
    462     storePtr(ImmPtr(codeBlock), X86::ecx);
    463 }
    464 #else
    465 ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
    466 {
    467     storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
    468 }
    469 #endif
    470 #endif
    471 }
     790#endif // USE(JSVALUE32_64)
     791
     792} // namespace JSC
    472793
    473794#endif // ENABLE(JIT)
  • trunk/JavaScriptCore/jit/JITOpcodes.cpp

    r45609 r46598  
    3333#include "JSArray.h"
    3434#include "JSCell.h"
     35#include "JSFunction.h"
     36#include "LinkBuffer.h"
    3537
    3638namespace JSC {
     39
     40#if USE(JSVALUE32_64)
     41
     42void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallPreLink, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
     43{
     44#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
     45    // (1) This function provides fast property access for string length
     46    Label stringLengthBegin = align();
     47   
     48    // regT0 holds payload, regT1 holds tag
     49   
     50    Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
     51    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
     52
     53    // Checks out okay! - get the length from the UString.
     54    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSString, m_value) + OBJECT_OFFSETOF(UString, m_rep)), regT2);
     55    load32(Address(regT2, OBJECT_OFFSETOF(UString::Rep, len)), regT2);
     56
     57    Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
     58    move(regT2, regT0);
     59    move(Imm32(JSValue::Int32Tag), regT1);
     60
     61    ret();
     62#endif
     63
     64    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
     65
     66#if ENABLE(JIT_OPTIMIZE_CALL)
     67    /* VirtualCallPreLink Trampoline */
     68    Label virtualCallPreLinkBegin = align();
     69
     70    // regT0 holds callee, regT1 holds argCount.
     71    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_body)), regT2);
     72    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT2);
     73    Jump hasCodeBlock1 = branchTestPtr(NonZero, regT2);
     74
     75    // Lazily generate a CodeBlock.
     76    preserveReturnAddressAfterCall(regT3); // return address
     77    restoreArgumentReference();
     78    Call callJSFunction1 = call();
     79    move(regT0, regT2);
     80    emitGetJITStubArg(1, regT0); // callee
     81    emitGetJITStubArg(5, regT1); // argCount
     82    restoreReturnAddressBeforeReturn(regT3); // return address
     83    hasCodeBlock1.link(this);
     84
     85    // regT2 holds codeBlock.
     86    Jump isNativeFunc1 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));
     87
     88    // Check argCount matches callee arity.
     89    Jump arityCheckOkay1 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
     90    preserveReturnAddressAfterCall(regT3);
     91    emitPutJITStubArg(regT3, 3); // return address
     92    emitPutJITStubArg(regT2, 7); // codeBlock
     93    restoreArgumentReference();
     94    Call callArityCheck1 = call();
     95    move(regT1, callFrameRegister);
     96    emitGetJITStubArg(1, regT0); // callee
     97    emitGetJITStubArg(5, regT1); // argCount
     98    restoreReturnAddressBeforeReturn(regT3); // return address
     99
     100    arityCheckOkay1.link(this);
     101    isNativeFunc1.link(this);
     102   
     103    compileOpCallInitializeCallFrame();
     104
     105    preserveReturnAddressAfterCall(regT3);
     106    emitPutJITStubArg(regT3, 3);
     107    restoreArgumentReference();
     108    Call callDontLazyLinkCall = call();
     109    restoreReturnAddressBeforeReturn(regT3);
     110    jump(regT0);
     111
     112    /* VirtualCallLink Trampoline */
     113    Label virtualCallLinkBegin = align();
     114
     115    // regT0 holds callee, regT1 holds argCount.
     116    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_body)), regT2);
     117    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT2);
     118    Jump hasCodeBlock2 = branchTestPtr(NonZero, regT2);
     119
     120    // Lazily generate a CodeBlock.
     121    preserveReturnAddressAfterCall(regT3); // return address
     122    restoreArgumentReference();
     123    Call callJSFunction2 = call();
     124    move(regT0, regT2);
     125    emitGetJITStubArg(1, regT0); // callee
     126    emitGetJITStubArg(5, regT1); // argCount
     127    restoreReturnAddressBeforeReturn(regT3); // return address
     128    hasCodeBlock2.link(this);
     129
     130    // regT2 holds codeBlock.
     131    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));
     132
     133    // Check argCount matches callee arity.
     134    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
     135    preserveReturnAddressAfterCall(regT3);
     136    emitPutJITStubArg(regT3, 3); // return address
     137    emitPutJITStubArg(regT2, 7); // codeBlock
     138    restoreArgumentReference();
     139    Call callArityCheck2 = call();
     140    move(regT1, callFrameRegister);
     141    emitGetJITStubArg(1, regT0); // callee
     142    emitGetJITStubArg(5, regT1); // argCount
     143    restoreReturnAddressBeforeReturn(regT3); // return address
     144
     145    arityCheckOkay2.link(this);
     146    isNativeFunc2.link(this);
     147
     148    compileOpCallInitializeCallFrame();
     149
     150    preserveReturnAddressAfterCall(regT3);
     151    emitPutJITStubArg(regT3, 3);
     152    restoreArgumentReference();
     153    Call callLazyLinkCall = call();
     154    restoreReturnAddressBeforeReturn(regT3);
     155    jump(regT0);
     156#endif // ENABLE(JIT_OPTIMIZE_CALL)
     157
     158    /* VirtualCall Trampoline */
     159    Label virtualCallBegin = align();
     160
     161    // regT0 holds callee, regT1 holds argCount.
     162    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_body)), regT2);
     163    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT2);
     164    Jump hasCodeBlock3 = branchTestPtr(NonZero, regT2);
     165
     166    // Lazily generate a CodeBlock.
     167    preserveReturnAddressAfterCall(regT3); // return address
     168    restoreArgumentReference();
     169    Call callJSFunction3 = call();
     170    move(regT0, regT2);
     171    emitGetJITStubArg(1, regT0); // callee
     172    emitGetJITStubArg(5, regT1); // argCount
     173    restoreReturnAddressBeforeReturn(regT3); // return address
     174    hasCodeBlock3.link(this);
     175   
     176    // regT2 holds codeBlock.
     177    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));
     178   
     179    // Check argCount matches callee.
     180    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
     181    preserveReturnAddressAfterCall(regT3);
     182    emitPutJITStubArg(regT3, 3); // return address
     183    emitPutJITStubArg(regT2, 7); // codeBlock
     184    restoreArgumentReference();
     185    Call callArityCheck3 = call();
     186    move(regT1, callFrameRegister);
     187    emitGetJITStubArg(1, regT0); // callee
     188    emitGetJITStubArg(5, regT1); // argCount
     189    restoreReturnAddressBeforeReturn(regT3); // return address
     190
     191    arityCheckOkay3.link(this);
     192    isNativeFunc3.link(this);
     193    compileOpCallInitializeCallFrame();
     194    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_body)), regT0);
     195    loadPtr(Address(regT0, OBJECT_OFFSETOF(FunctionBodyNode, m_jitCode)), regT0);
     196    jump(regT0);
     197
     198#if PLATFORM(X86)
     199    Label nativeCallThunk = align();
     200    preserveReturnAddressAfterCall(regT0);
     201    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address
     202
     203    // Load caller frame's scope chain into this callframe so that whatever we call can
     204    // get to its global data.
     205    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
     206    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
     207    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
     208   
     209    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
     210
     211    /* We have two structs that we use to describe the stackframe we set up for our
     212     * call to native code.  NativeCallFrameStructure describes how we set up the stack
     213     * in advance of the call.  NativeFunctionCalleeSignature describes the callframe
     214     * as the native code expects it.  We do this as we are using the fastcall calling
     215     * convention which results in the callee popping its arguments off the stack, but
     216     * not the rest of the callframe so we need a nice way to ensure we increment the
     217     * stack pointer by the right amount after the call.
     218     */
     219
     220#if COMPILER(MSVC) || PLATFORM(LINUX)
     221#if COMPILER(MSVC)
     222#pragma pack(push)
     223#pragma pack(4)
     224#endif // COMPILER(MSVC)
     225    struct NativeCallFrameStructure {
     226      //  CallFrame* callFrame; // passed in EDX
     227        JSObject* callee;
     228        JSValue thisValue;
     229        ArgList* argPointer;
     230        ArgList args;
     231        JSValue result;
     232    };
     233    struct NativeFunctionCalleeSignature {
     234        JSObject* callee;
     235        JSValue thisValue;
     236        ArgList* argPointer;
     237    };
     238#if COMPILER(MSVC)
     239#pragma pack(pop)
     240#endif // COMPILER(MSVC)
     241#else
     242    struct NativeCallFrameStructure {
     243      //  CallFrame* callFrame; // passed in ECX
     244      //  JSObject* callee; // passed in EDX
     245        JSValue thisValue;
     246        ArgList* argPointer;
     247        ArgList args;
     248    };
     249    struct NativeFunctionCalleeSignature {
     250        JSValue thisValue;
     251        ArgList* argPointer;
     252    };
     253#endif
     254   
     255    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
     256    // Allocate system stack frame
     257    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);
     258
     259    // Set up arguments
     260    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
     261
     262    // push argcount
     263    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));
     264   
     265    // Calculate the start of the callframe header, and store in regT1
     266    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);
     267   
     268    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
     269    mul32(Imm32(sizeof(Register)), regT0, regT0);
     270    subPtr(regT0, regT1);
     271    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));
     272
     273    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
     274    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
     275    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));
     276
     277    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
     278    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
     279    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
     280    storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
     281    storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
     282
     283#if COMPILER(MSVC) || PLATFORM(LINUX)
     284    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
     285    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86::ecx);
     286
     287    // Plant callee
     288    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::eax);
     289    storePtr(X86::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));
     290
     291    // Plant callframe
     292    move(callFrameRegister, X86::edx);
     293
     294    call(Address(X86::eax, OBJECT_OFFSETOF(JSFunction, m_data)));
     295
     296    // JSValue is a non-POD type, so eax points to it
     297    emitLoad(0, regT1, regT0, X86::eax);
     298#else
     299    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::edx); // callee
     300    move(callFrameRegister, X86::ecx); // callFrame
     301    call(Address(X86::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
     302#endif
     303
     304    // We've put a few temporaries on the stack in addition to the actual arguments
     305    // so pull them off now
     306    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
     307
     308    // Check for an exception
     309    // FIXME: Maybe we can optimize this comparison to JSValue().
     310    move(ImmPtr(&globalData->exception), regT2);
     311    Jump sawException1 = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::CellTag));
     312    Jump sawException2 = branch32(NonZero, payloadFor(0, regT2), Imm32(0));
     313
     314    // Grab the return address.
     315    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);
     316   
     317    // Restore our caller's "r".
     318    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
     319   
     320    // Return.
     321    restoreReturnAddressBeforeReturn(regT3);
     322    ret();
     323
     324    // Handle an exception
     325    sawException1.link(this);
     326    sawException2.link(this);
     327    // Grab the return address.
     328    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
     329    move(ImmPtr(&globalData->exceptionLocation), regT2);
     330    storePtr(regT1, regT2);
     331    move(ImmPtr(reinterpret_cast<void*>(ctiVMThrowTrampoline)), regT2);
     332    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
     333    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
     334    restoreReturnAddressBeforeReturn(regT2);
     335    ret();
     336
     337#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
     338#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
     339#else
     340    breakpoint();
     341#endif
     342   
     343#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
     344    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
     345    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
     346    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
     347#endif
     348
     349    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
     350    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));
     351
     352#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
     353    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
     354    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
     355    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
     356#endif
     357#if ENABLE(JIT_OPTIMIZE_CALL)
     358    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
     359    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
     360    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
     361    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
     362    patchBuffer.link(callDontLazyLinkCall, FunctionPtr(cti_vm_dontLazyLinkCall));
     363    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
     364#endif
     365    patchBuffer.link(callArityCheck3, FunctionPtr(cti_op_call_arityCheck));
     366    patchBuffer.link(callJSFunction3, FunctionPtr(cti_op_call_JSFunction));
     367
     368    CodeRef finalCode = patchBuffer.finalizeCode();
     369    *executablePool = finalCode.m_executablePool;
     370
     371    *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
     372    *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
     373#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
     374    *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
     375#else
     376    UNUSED_PARAM(ctiStringLengthTrampoline);
     377#endif
     378#if ENABLE(JIT_OPTIMIZE_CALL)
     379    *ctiVirtualCallPreLink = trampolineAt(finalCode, virtualCallPreLinkBegin);
     380    *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
     381#else
     382    UNUSED_PARAM(ctiVirtualCallPreLink);
     383    UNUSED_PARAM(ctiVirtualCallLink);
     384#endif
     385}
     386
     387void JIT::emit_op_mov(Instruction* currentInstruction)
     388{
     389    unsigned dst = currentInstruction[1].u.operand;
     390    unsigned src = currentInstruction[2].u.operand;
     391
     392    if (m_codeBlock->isConstantRegisterIndex(src))
     393        emitStore(dst, getConstantOperand(src));
     394    else {
     395        emitLoad(src, regT1, regT0);
     396        emitStore(dst, regT1, regT0);
     397        map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
     398    }
     399}
     400
     401void JIT::emit_op_end(Instruction* currentInstruction)
     402{
     403    if (m_codeBlock->needsFullScopeChain())
     404        JITStubCall(this, cti_op_end).call();
     405    ASSERT(returnValueRegister != callFrameRegister);
     406    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
     407    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
     408    ret();
     409}
     410
     411void JIT::emit_op_jmp(Instruction* currentInstruction)
     412{
     413    unsigned target = currentInstruction[1].u.operand;
     414    addJump(jump(), target + 1);
     415}
     416
     417void JIT::emit_op_loop(Instruction* currentInstruction)
     418{
     419    unsigned target = currentInstruction[1].u.operand;
     420    emitTimeoutCheck();
     421    addJump(jump(), target + 1);
     422}
     423
     424void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
     425{
     426    unsigned op1 = currentInstruction[1].u.operand;
     427    unsigned op2 = currentInstruction[2].u.operand;
     428    unsigned target = currentInstruction[3].u.operand;
     429
     430    emitTimeoutCheck();
     431
     432    if (isOperandConstantImmediateInt(op1)) {
     433        emitLoad(op2, regT1, regT0);
     434        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
     435        addJump(branch32(GreaterThan, regT0, Imm32(getConstantOperand(op1).asInt32())), target + 3);
     436        return;
     437    }
     438   
     439    if (isOperandConstantImmediateInt(op2)) {
     440        emitLoad(op1, regT1, regT0);
     441        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
     442        addJump(branch32(LessThan, regT0, Imm32(getConstantOperand(op2).asInt32())), target + 3);
     443        return;
     444    }
     445
     446    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
     447    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
     448    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
     449    addJump(branch32(LessThan, regT0, regT2), target + 3);
     450}
     451
     452void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     453{
     454    unsigned op1 = currentInstruction[1].u.operand;
     455    unsigned op2 = currentInstruction[2].u.operand;
     456    unsigned target = currentInstruction[3].u.operand;
     457
     458    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
     459        linkSlowCase(iter); // int32 check
     460    linkSlowCase(iter); // int32 check
     461
     462    JITStubCall stubCall(this, cti_op_loop_if_less);
     463    stubCall.addArgument(op1);
     464    stubCall.addArgument(op2);
     465    stubCall.call();
     466    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
     467}
     468
     469void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
     470{
     471    unsigned op1 = currentInstruction[1].u.operand;
     472    unsigned op2 = currentInstruction[2].u.operand;
     473    unsigned target = currentInstruction[3].u.operand;
     474
     475    emitTimeoutCheck();
     476
     477    if (isOperandConstantImmediateInt(op1)) {
     478        emitLoad(op2, regT1, regT0);
     479        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
     480        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target + 3);
     481        return;
     482    }
     483
     484    if (isOperandConstantImmediateInt(op2)) {
     485        emitLoad(op1, regT1, regT0);
     486        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
     487        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target + 3);
     488        return;
     489    }
     490
     491    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
     492    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
     493    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
     494    addJump(branch32(LessThanOrEqual, regT0, regT2), target + 3);
     495}
     496
     497void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     498{
     499    unsigned op1 = currentInstruction[1].u.operand;
     500    unsigned op2 = currentInstruction[2].u.operand;
     501    unsigned target = currentInstruction[3].u.operand;
     502
     503    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
     504        linkSlowCase(iter); // int32 check
     505    linkSlowCase(iter); // int32 check
     506
     507    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
     508    stubCall.addArgument(op1);
     509    stubCall.addArgument(op2);
     510    stubCall.call();
     511    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
     512}
     513
     514void JIT::emit_op_new_object(Instruction* currentInstruction)
     515{
     516    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
     517}
     518
     519void JIT::emit_op_instanceof(Instruction* currentInstruction)
     520{
     521    unsigned dst = currentInstruction[1].u.operand;
     522    unsigned value = currentInstruction[2].u.operand;
     523    unsigned baseVal = currentInstruction[3].u.operand;
     524    unsigned proto = currentInstruction[4].u.operand;
     525
     526    // Load the operands (baseVal, proto, and value respectively) into registers.
     527    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
     528    emitLoadPayload(proto, regT1);
     529    emitLoadPayload(baseVal, regT0);
     530    emitLoadPayload(value, regT2);
     531
     532    // Check that baseVal & proto are cells.
     533    emitJumpSlowCaseIfNotJSCell(proto);
     534    emitJumpSlowCaseIfNotJSCell(baseVal);
     535
     536    // Check that baseVal is an object, that it 'ImplementsHasInstance' but that it does not 'OverridesHasInstance'.
     537    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
     538    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType))); // FIXME: Maybe remove this test.
     539    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsHasInstance))); // FIXME: TOT checks ImplementsDefaultHasInstance.
     540
     541    // If value is not an Object, return false.
     542    emitLoadTag(value, regT0);
     543    Jump valueIsImmediate = branch32(NotEqual, regT0, Imm32(JSValue::CellTag));
     544    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
     545    Jump valueIsNotObject = branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)); // FIXME: Maybe remove this test.
     546
     547    // Check proto is object.
     548    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
     549    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
     550
     551    // Optimistically load the result true, and start looping.
     552    // Initially, regT1 still contains proto and regT2 still contains value.
     553    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
     554    move(Imm32(JSValue::TrueTag), regT0);
     555    Label loop(this);
     556
     557    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
     558    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
     559    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
     560    load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
     561    Jump isInstance = branchPtr(Equal, regT2, regT1);
     562    branch32(NotEqual, regT2, Imm32(0), loop);
     563
     564    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
     565    valueIsImmediate.link(this);
     566    valueIsNotObject.link(this);
     567    move(Imm32(JSValue::FalseTag), regT0);
     568
     569    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
     570    isInstance.link(this);
     571    emitStoreBool(dst, regT0);
     572}
     573
     574void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     575{
     576    unsigned dst = currentInstruction[1].u.operand;
     577    unsigned value = currentInstruction[2].u.operand;
     578    unsigned baseVal = currentInstruction[3].u.operand;
     579    unsigned proto = currentInstruction[4].u.operand;
     580
     581    linkSlowCaseIfNotJSCell(iter, baseVal);
     582    linkSlowCaseIfNotJSCell(iter, proto);
     583    linkSlowCase(iter);
     584    linkSlowCase(iter);
     585    linkSlowCase(iter);
     586
     587    JITStubCall stubCall(this, cti_op_instanceof);
     588    stubCall.addArgument(value);
     589    stubCall.addArgument(baseVal);
     590    stubCall.addArgument(proto);
     591    stubCall.call(dst);
     592}
     593
     594void JIT::emit_op_new_func(Instruction* currentInstruction)
     595{
     596    JITStubCall stubCall(this, cti_op_new_func);
     597    stubCall.addArgument(ImmPtr(m_codeBlock->function(currentInstruction[2].u.operand)));
     598    stubCall.call(currentInstruction[1].u.operand);
     599}
     600
     601void JIT::emit_op_get_global_var(Instruction* currentInstruction)
     602{
     603    int dst = currentInstruction[1].u.operand;
     604    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
     605    ASSERT(globalObject->isGlobalObject());
     606    int index = currentInstruction[3].u.operand;
     607
     608    loadPtr(&globalObject->d()->registers, regT2);
     609
     610    emitLoad(index, regT1, regT0, regT2);
     611    emitStore(dst, regT1, regT0);
     612    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
     613}
     614
     615void JIT::emit_op_put_global_var(Instruction* currentInstruction)
     616{
     617    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
     618    ASSERT(globalObject->isGlobalObject());
     619    int index = currentInstruction[2].u.operand;
     620    int value = currentInstruction[3].u.operand;
     621
     622    emitLoad(value, regT1, regT0);
     623
     624    loadPtr(&globalObject->d()->registers, regT2);
     625    emitStore(index, regT1, regT0, regT2);
     626    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
     627}
     628
     629void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
     630{
     631    int dst = currentInstruction[1].u.operand;
     632    int index = currentInstruction[2].u.operand;
     633    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
     634
     635    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
     636    while (skip--)
     637        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
     638
     639    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
     640    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
     641    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);
     642
     643    emitLoad(index, regT1, regT0, regT2);
     644    emitStore(dst, regT1, regT0);
     645    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
     646}
     647
     648void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
     649{
     650    int index = currentInstruction[1].u.operand;
     651    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
     652    int value = currentInstruction[3].u.operand;
     653
     654    emitLoad(value, regT1, regT0);
     655
     656    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
     657    while (skip--)
     658        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
     659
     660    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
     661    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
     662    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);
     663
     664    emitStore(index, regT1, regT0, regT2);
     665    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
     666}
     667
     668void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
     669{
     670    JITStubCall stubCall(this, cti_op_tear_off_activation);
     671    stubCall.addArgument(currentInstruction[1].u.operand);
     672    stubCall.call();
     673}
     674
     675void JIT::emit_op_tear_off_arguments(Instruction*)
     676{
     677    JITStubCall(this, cti_op_tear_off_arguments).call();
     678}
     679
     680void JIT::emit_op_new_array(Instruction* currentInstruction)
     681{
     682    JITStubCall stubCall(this, cti_op_new_array);
     683    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
     684    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
     685    stubCall.call(currentInstruction[1].u.operand);
     686}
     687
     688void JIT::emit_op_resolve(Instruction* currentInstruction)
     689{
     690    JITStubCall stubCall(this, cti_op_resolve);
     691    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
     692    stubCall.call(currentInstruction[1].u.operand);
     693}
     694
// op_to_primitive fast path: non-cells and string cells pass through
// unchanged; any other cell takes the slow case.
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    // Non-cell values are already primitive: skip the vptr check.
    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    // Cells must be strings (identified by vptr); otherwise slow case.
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}
     710
     711void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     712{
     713    int dst = currentInstruction[1].u.operand;
     714
     715    linkSlowCase(iter);
     716
     717    JITStubCall stubCall(this, cti_op_to_primitive);
     718    stubCall.addArgument(regT1, regT0);
     719    stubCall.call(dst);
     720}
     721
     722void JIT::emit_op_strcat(Instruction* currentInstruction)
     723{
     724    JITStubCall stubCall(this, cti_op_strcat);
     725    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
     726    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
     727    stubCall.call(currentInstruction[1].u.operand);
     728}
     729
// op_loop_if_true: branch to 'target' when the condition is truthy.
// Fast path handles Int32 and boolean tags inline; any other tag falls
// to the slow case. Emits a timeout check since this sits on a loop edge.
void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitTimeoutCheck();

    emitLoad(cond, regT1, regT0);

    // Int32 case: any non-zero payload is truthy.
    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    addJump(branch32(NotEqual, regT0, Imm32(0)), target + 2);
    Jump isNotZero = jump();

    isNotInteger.link(this);

    // Boolean case: true jumps, false falls through; any other tag is slow.
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target + 2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::FalseTag)));

    isNotZero.link(this);
}
     750
     751void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     752{
     753    unsigned cond = currentInstruction[1].u.operand;
     754    unsigned target = currentInstruction[2].u.operand;
     755
     756    linkSlowCase(iter);
     757
     758    JITStubCall stubCall(this, cti_op_jtrue);
     759    stubCall.addArgument(cond);
     760    stubCall.call();
     761    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 2);
     762}
     763
     764void JIT::emit_op_resolve_base(Instruction* currentInstruction)
     765{
     766    JITStubCall stubCall(this, cti_op_resolve_base);
     767    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
     768    stubCall.call(currentInstruction[1].u.operand);
     769}
     770
     771void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
     772{
     773    JITStubCall stubCall(this, cti_op_resolve_skip);
     774    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
     775    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
     776    stubCall.call(currentInstruction[1].u.operand);
     777}
     778
// op_resolve_global fast path: if the global object's Structure matches the
// one cached in the GlobalResolveInfo, load the property straight out of the
// global's external storage at the cached offset; otherwise slow case.
void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;

    // Each op_resolve_global consumes one GlobalResolveInfo slot, in
    // emission order; the slow case below relies on the same ordering.
    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Verify structure.
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));

    // Load property. Each JSValue slot is 8 bytes: payload at offset 0,
    // tag at offset 4 (hence TimesEight indexing).
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_resolve_global), dst, regT1, regT0);
}
     803
     804void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     805{
     806    unsigned dst = currentInstruction[1].u.operand;
     807    void* globalObject = currentInstruction[2].u.jsCell;
     808    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);
     809
     810    unsigned currentIndex = m_globalResolveInfoIndex++;
     811
     812    linkSlowCase(iter);
     813    JITStubCall stubCall(this, cti_op_resolve_global);
     814    stubCall.addArgument(ImmPtr(globalObject));
     815    stubCall.addArgument(ImmPtr(ident));
     816    stubCall.addArgument(Imm32(currentIndex));
     817    stubCall.call(dst);
     818}
     819
// op_not fast path: only boolean-tagged values are handled inline.
// Relies on FalseTag and TrueTag differing only in bit 0.
void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    // After xoring with FalseTag, any bit outside bit 0 means the tag was
    // not a boolean tag -> slow case.
    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    // Xoring with TrueTag then produces the logically-inverted boolean tag.
    xor32(Imm32(JSValue::TrueTag), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}
     833
     834void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     835{
     836    unsigned dst = currentInstruction[1].u.operand;
     837    unsigned src = currentInstruction[2].u.operand;
     838
     839    linkSlowCase(iter);
     840
     841    JITStubCall stubCall(this, cti_op_not);
     842    stubCall.addArgument(src);
     843    stubCall.call(dst);
     844}
     845
// op_jfalse: branch to 'target' when the condition is falsy.
// Handles booleans, Int32 zero, and double zero inline; cells and other
// tags take the slow case.
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    // Boolean tags: false jumps, true falls through to the join at the end.
    Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target + 2);

    // Int32: only a zero payload is falsy.
    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
    addJump(jump(), target + 2);

    isNotInteger.link(this);

    // Tags above LowestTag are not doubles -> slow case.
    addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

    // Double: compare against 0.0; equal (i.e. +0/-0) is falsy.
    zeroDouble(fpRegT0);
    emitLoadDouble(cond, fpRegT1);
    addJump(branchDouble(DoubleEqual, fpRegT0, fpRegT1), target + 2);

    isTrue.link(this);
    isTrue2.link(this);
}
     871
     872void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     873{
     874    unsigned cond = currentInstruction[1].u.operand;
     875    unsigned target = currentInstruction[2].u.operand;
     876
     877    linkSlowCase(iter);
     878    JITStubCall stubCall(this, cti_op_jtrue);
     879    stubCall.addArgument(cond);
     880    stubCall.call();
     881    emitJumpSlowToHot(branchTest32(Zero, regT0), target + 2); // Inverted.
     882}
     883
// op_jtrue: branch to 'target' when the condition is truthy.
// Mirror image of emit_op_jfalse: booleans, non-zero Int32, and non-zero
// doubles jump; cells and other tags take the slow case.
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    // Boolean tags: true jumps, false falls through to the join at the end.
    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target + 2);

    // Int32: any non-zero payload is truthy.
    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target + 2);

    isNotInteger.link(this);

    // Tags above LowestTag are not doubles -> slow case.
    addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

    // Double: anything not equal to 0.0 is truthy.
    zeroDouble(fpRegT0);
    emitLoadDouble(cond, fpRegT1);
    addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target + 2);

    isFalse.link(this);
    isFalse2.link(this);
}
     909
     910void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     911{
     912    unsigned cond = currentInstruction[1].u.operand;
     913    unsigned target = currentInstruction[2].u.operand;
     914
     915    linkSlowCase(iter);
     916    JITStubCall stubCall(this, cti_op_jtrue);
     917    stubCall.addArgument(cond);
     918    stubCall.call();
     919    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 2);
     920}
     921
// op_jeq_null: branch to 'target' when the operand compares equal to null,
// i.e. it is null, undefined, or a cell masquerading as undefined.
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    // Jump if tag == NullTag OR tag == UndefinedTag.
    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(NonZero, regT1), target + 2);

    wasNotImmediate.link(this);
}
     948
// op_jneq_null: branch to 'target' when the operand does NOT compare equal
// to null — the exact inverse of emit_op_jeq_null (branch senses flipped).
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    // Jump unless tag == NullTag OR tag == UndefinedTag.
    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(Zero, regT1), target + 2);

    wasNotImmediate.link(this);
}
     975
// op_jneq_ptr: branch to 'target' unless the operand is exactly the given
// cell pointer (tag must be CellTag and payload must match 'ptr').
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target + 3);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target + 3);
}
     986
// op_jsr: store a (to-be-patched) return address into the retAddrDst
// register slot, then jump to 'target'. The store site and the label just
// after the jump are recorded in m_jsrSites so the real return address can
// be patched in later.
void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target + 2);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}
     995
     996void JIT::emit_op_sret(Instruction* currentInstruction)
     997{
     998    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
     999}
     1000
// op_eq fast path: handles same-tagged non-cell, non-double operands by
// comparing payloads. Mismatched tags, cells, and doubles are slow cases.
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3)); // tags differ
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag))); // both cells
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag))); // doubles

    // Payload equality; turn 0/1 into a boolean tag by or-ing FalseTag.
    set8(Equal, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
     1017
// Slow cases for op_eq. The three slow cases from the fast path are
// consumed in emission order: (1) tags not equal -> generic, (2) both
// cells -> try the string fast path, else generic, (3) doubles -> generic.
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    // Both payloads are cells here; non-strings go generic.
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    // Both stubs leave a 0/1 result in regT0; tag it as a boolean.
    storeResult.link(this);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}
     1052
// op_neq fast path: identical structure to emit_op_eq, but stores the
// inverted (NotEqual) payload comparison.
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3)); // tags differ
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag))); // both cells
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag))); // doubles

    set8(NotEqual, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
     1069
// Slow cases for op_neq. Same slow-case consumption order as
// emitSlow_op_eq; both stubs compute equality, so the combined result is
// inverted (xor 1) before being tagged and stored.
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case. Note: arguments are passed from the still-live
    // tag/payload register pairs rather than re-read from the frame.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(Imm32(0x1), regT0); // invert equality -> inequality
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}
     1103
// Shared fast path for op_stricteq / op_nstricteq: with tags alone it can
// decide strict (in)equality for operands that are neither doubles nor a
// cell/Int32 pair — for the remaining tags, equal tags imply the result.
// NOTE(review): the tag-only set8 comparison assumes payloads are
// irrelevant for the tags that survive the two slow-case filters — confirm
// against the JSValue32_64 tag encoding.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));

    if (type == OpStrictEq)
        set8(Equal, regT0, regT1, regT0);
    else
        set8(NotEqual, regT0, regT1, regT0);

    // Tag the 0/1 comparison result as a boolean.
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
     1129
// op_stricteq: delegates to the shared strict-equality compiler.
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
     1134
     1135void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     1136{
     1137    unsigned dst = currentInstruction[1].u.operand;
     1138    unsigned src1 = currentInstruction[2].u.operand;
     1139    unsigned src2 = currentInstruction[3].u.operand;
     1140
     1141    linkSlowCase(iter);
     1142    linkSlowCase(iter);
     1143
     1144    JITStubCall stubCall(this, cti_op_stricteq);
     1145    stubCall.addArgument(src1);
     1146    stubCall.addArgument(src2);
     1147    stubCall.call(dst);
     1148}
     1149
// op_nstricteq: delegates to the shared strict-equality compiler.
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
     1154
     1155void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     1156{
     1157    unsigned dst = currentInstruction[1].u.operand;
     1158    unsigned src1 = currentInstruction[2].u.operand;
     1159    unsigned src2 = currentInstruction[3].u.operand;
     1160
     1161    linkSlowCase(iter);
     1162    linkSlowCase(iter);
     1163
     1164    JITStubCall stubCall(this, cti_op_nstricteq);
     1165    stubCall.addArgument(src1);
     1166    stubCall.addArgument(src2);
     1167    stubCall.call(dst);
     1168}
     1169
// op_eq_null: dst = (src == null), entirely inline — no slow case.
// Cells are "null" iff their structure masquerades as undefined;
// immediates iff tagged Null or Undefined.
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // Cell: result = MasqueradesAsUndefined flag set.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate: result = (tag == NullTag) || (tag == UndefinedTag).
    set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    // Tag the 0/1 result as a boolean.
    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}
     1195
// op_neq_null: dst = (src != null), the inverse of emit_op_eq_null.
// Inverted set conditions, with the immediate results combined by AND
// (both "not null" and "not undefined" must hold).
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // Cell: result = MasqueradesAsUndefined flag NOT set.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate: result = (tag != NullTag) && (tag != UndefinedTag).
    set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
    set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    // Tag the 0/1 result as a boolean.
    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}
     1221
     1222void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
     1223{
     1224    JITStubCall stubCall(this, cti_op_resolve_with_base);
     1225    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
     1226    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
     1227    stubCall.call(currentInstruction[2].u.operand);
     1228}
     1229
     1230void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
     1231{
     1232    JITStubCall stubCall(this, cti_op_new_func_exp);
     1233    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpression(currentInstruction[2].u.operand)));
     1234    stubCall.call(currentInstruction[1].u.operand);
     1235}
     1236
     1237void JIT::emit_op_new_regexp(Instruction* currentInstruction)
     1238{
     1239    JITStubCall stubCall(this, cti_op_new_regexp);
     1240    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
     1241    stubCall.call(currentInstruction[1].u.operand);
     1242}
     1243
// op_throw: hand the exception value to the stub, which unwinds by
// rewriting its own return address; control never returns here.
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}
     1257
// op_next_pname: advance a property-name iterator. The stub returns the
// next name (payload regT0, tag regT1) or a zero payload at end of
// iteration; a non-end result is stored to dst and jumps to 'target'.
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int iter = currentInstruction[2].u.operand;
    int target = currentInstruction[3].u.operand;

    // Load the iterator's payload word from the call frame.
    load32(Address(callFrameRegister, (iter * sizeof(Register))), regT0);

    JITStubCall stubCall(this, cti_op_next_pname);
    stubCall.addArgument(regT0);
    stubCall.call();

    // Zero payload signals the iterator is exhausted: fall through.
    Jump endOfIter = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_next_pname), dst, regT1, regT0);
    addJump(jump(), target + 3);
    endOfIter.link(this);
}
     1276
     1277void JIT::emit_op_push_scope(Instruction* currentInstruction)
     1278{
     1279    JITStubCall stubCall(this, cti_op_push_scope);
     1280    stubCall.addArgument(currentInstruction[1].u.operand);
     1281    stubCall.call(currentInstruction[1].u.operand);
     1282}
     1283
     1284void JIT::emit_op_pop_scope(Instruction*)
     1285{
     1286    JITStubCall(this, cti_op_pop_scope).call();
     1287}
     1288
// op_to_jsnumber fast path: Int32s pass straight through; tags at or above
// DeletedValueTag (i.e. non-numeric tags) take the slow case, and the
// remaining tags are doubles which also pass through unchanged.
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::DeletedValueTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}
     1304
     1305void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     1306{
     1307    int dst = currentInstruction[1].u.operand;
     1308
     1309    linkSlowCase(iter);
     1310
     1311    JITStubCall stubCall(this, cti_op_to_jsnumber);
     1312    stubCall.addArgument(regT1, regT0);
     1313    stubCall.call(dst);
     1314}
     1315
     1316void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
     1317{
     1318    JITStubCall stubCall(this, cti_op_push_new_scope);
     1319    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
     1320    stubCall.addArgument(currentInstruction[3].u.operand);
     1321    stubCall.call(currentInstruction[1].u.operand);
     1322}
     1323
// op_catch: entry point after cti_op_throw returns into a handler.
// Re-establishes the call frame register and stores the thrown value
// (returned in regT1/regT0) into the exception register.
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;

    // This opcode only executes after a return from cti_op_throw.

    // cti_op_throw may have taken us to a call frame further up the stack; reload
    // the call frame pointer to adjust.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));

    // Now store the exception returned by cti_op_throw.
    emitStore(exception, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}
     1338
     1339void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
     1340{
     1341    JITStubCall stubCall(this, cti_op_jmp_scopes);
     1342    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
     1343    stubCall.call();
     1344    addJump(jump(), currentInstruction[2].u.operand + 2);
     1345}
     1346
// op_switch_imm: immediate-value switch. Registers the jump table with
// m_switches so its ctiOffsets get filled in at link time, then asks the
// stub for the destination and jumps indirectly through regT0.
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
     1364
// op_switch_char: single-character switch. Same shape as
// emit_op_switch_imm but uses the character jump table and stub.
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
     1382
// op_switch_string: string switch. Unlike the immediate/character
// variants, the StringJumpTable needs no ctiOffsets pre-grow here.
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
     1399
// op_new_error: construct an error object via the stub.
// Operands: [1] destination, [2] error type (immediate), [3] constant-pool
// index of the message value; the current bytecode index is also passed so
// the stub can attach source position information.
void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned type = currentInstruction[2].u.operand;
    unsigned message = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(type));
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(dst);
}
     1412
     1413void JIT::emit_op_debug(Instruction* currentInstruction)
     1414{
     1415    JITStubCall stubCall(this, cti_op_debug);
     1416    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
     1417    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
     1418    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
     1419    stubCall.call();
     1420}
     1421
     1422
// op_enter: initialize every local variable register to undefined.
void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}
     1431
     1432void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
     1433{
     1434    emit_op_enter(currentInstruction);
     1435
     1436    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
     1437}
     1438
// Lazily create the 'arguments' object. The ArgumentsRegister slot encodes
// "not yet created" as a null cell (see emit_op_init_arguments), so creation
// is skipped when the slot holds a non-cell tag or a non-null payload.
void JIT::emit_op_create_arguments(Instruction*)
{
    // Fast-path exits: either the tag is not CellTag, or the cell payload is
    // already a non-null pointer (arguments were created earlier).
    Jump argsNotCell = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::CellTag));
    Jump argsNotNull = branchTestPtr(NonZero, payloadFor(RegisterFile::ArgumentsRegister, callFrameRegister));

    // If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
    // A function with only the implicit 'this' parameter uses the cheaper
    // no-params stub.
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    // Both fast-path jumps land here, after the (possibly skipped) stub call.
    argsNotCell.link(this);
    argsNotNull.link(this);
}
     1453   
// Store the empty JSValue() into the ArgumentsRegister call-frame slot;
// emit_op_create_arguments treats that null cell as "arguments not yet
// created", enabling lazy creation.
void JIT::emit_op_init_arguments(Instruction*)
{
    emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
}
     1458
// Fast path for op_convert_this: leave 'this' untouched when it is a cell
// whose structure does not request conversion; otherwise fall through to the
// slow case (emitSlow_op_convert_this). The two addSlowCase calls below must
// stay in this order — the slow path links them with two linkSlowCase calls.
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    // Slow case 1: 'this' is not a cell (tag != CellTag).
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    // Slow case 2: the cell's structure has the NeedsThisConversion flag set.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));

    // Record that regT1/regT0 still hold thisRegister at the next bytecode,
    // so a following opcode can reuse them without reloading.
    map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}
     1472
// Slow path for op_convert_this: perform the conversion in a stub call.
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    // Two linkSlowCase calls, matching the two addSlowCase calls in
    // emit_op_convert_this (non-cell tag, NeedsThisConversion flag).
    linkSlowCase(iter);
    linkSlowCase(iter);

    // regT1/regT0 still hold the tag/payload of 'this' loaded on the fast path.
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}
     1484
// Notify the profiler (if one is enabled) that a call is about to happen.
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    // Load the enabled-profiler reference from the JIT stack frame and skip
    // the stub call entirely when it holds null (no profiler attached).
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}
     1495
// Notify the profiler (if one is enabled) that a call just returned.
void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    // Same guard as emit_op_profile_will_call: bypass the stub when the
    // enabled-profiler reference on the JIT stack frame is null.
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}
     1506
     1507#else // USE(JSVALUE32_64)
    371508
    381509#define RECORD_JUMP_TARGET(targetOffset) \
    391510   do { m_labels[m_bytecodeIndex + (targetOffset)].used(); } while (false)
     1511
// Generates the shared CTI machine-code trampolines, copies them into an
// executable pool, links all stub calls, and writes the resulting entry
// points through the ctiXxx out-parameters:
//   - ctiStringLengthTrampoline: fast path for string.length (when
//     JIT_OPTIMIZE_PROPERTY_ACCESS is enabled)
//   - ctiVirtualCallPreLink / ctiVirtualCallLink / ctiVirtualCall: slow-case
//     trampolines for op_call / op_call_eval / op_construct
//   - ctiNativeCallThunk: thunk that calls a native (host) function
// NOTE(review): the emission order below defines the generated machine code;
// do not reorder statements.
void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallPreLink, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check eax is a string
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the Ustring.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSString, m_value) + OBJECT_OFFSETOF(UString, m_rep)), regT0);
    load32(Address(regT0, OBJECT_OFFSETOF(UString::Rep, len)), regT0);

    // Lengths that don't fit in an immediate int take the slow path.
    Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));

    // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();
#endif

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    Label virtualCallPreLinkBegin = align();

    // Load the callee CodeBlock* into eax
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
    Jump hasCodeBlock1 = branchTestPtr(NonZero, regT0);
    // No code block yet: call out (cti_op_call_JSFunction, linked below) to
    // produce one, then reload the callee and arg count from the stub args.
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(1, regT2);
    emitGetJITStubArg(3, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock1.link(this);

    // Native functions have no arity check.
    Jump isNativeFunc1 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));

    // Check argCount matches callee arity.
    Jump arityCheckOkay1 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 2);
    emitPutJITStubArg(regT0, 4);
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(1, regT2);
    emitGetJITStubArg(3, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay1.link(this);
    isNativeFunc1.link(this);

    compileOpCallInitializeCallFrame();

    // Pre-link variant: always routes through cti_vm_dontLazyLinkCall.
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 2);
    restoreArgumentReference();
    Call callDontLazyLinkCall = call();
    emitGetJITStubArg(1, regT2);
    restoreReturnAddressBeforeReturn(regT3);

    jump(regT0);

    Label virtualCallLinkBegin = align();

    // Load the callee CodeBlock* into eax
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
    Jump hasCodeBlock2 = branchTestPtr(NonZero, regT0);
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    emitGetJITStubArg(1, regT2);
    emitGetJITStubArg(3, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    Jump isNativeFunc2 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 2);
    emitPutJITStubArg(regT0, 4);
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(1, regT2);
    emitGetJITStubArg(3, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);
    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();

    // Link variant: routes through cti_vm_lazyLinkCall to patch the call site.
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 2);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);

    jump(regT0);

    Label virtualCallBegin = align();

    // Load the callee CodeBlock* into eax
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
    Jump hasCodeBlock3 = branchTestPtr(NonZero, regT0);
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction3 = call();
    emitGetJITStubArg(1, regT2);
    emitGetJITStubArg(3, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3); // reload the function body nody, so we can reload the code pointer.
    hasCodeBlock3.link(this);

    Jump isNativeFunc3 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 2);
    emitPutJITStubArg(regT0, 4);
    restoreArgumentReference();
    Call callArityCheck3 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(1, regT2);
    emitGetJITStubArg(3, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3); // reload the function body nody, so we can reload the code pointer.
    arityCheckOkay3.link(this);
    isNativeFunc3.link(this);

    // load ctiCode from the new codeBlock.
    loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_jitCode)), regT0);

    compileOpCallInitializeCallFrame();
    jump(regT0);


    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);


#if PLATFORM(X86_64)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, X86::ecx);

    // Allocate stack space for our arglist
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
    COMPILE_ASSERT((sizeof(ArgList) & 0xf) == 0, ArgList_should_by_16byte_aligned);

    // Set up arguments
    subPtr(Imm32(1), X86::ecx); // Don't include 'this' in argcount

    // Push argcount
    storePtr(X86::ecx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in edx
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), callFrameRegister, X86::edx);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (ecx)
    mul32(Imm32(sizeof(Register)), X86::ecx, X86::ecx);
    subPtr(X86::ecx, X86::edx);

    // push pointer to arguments
    storePtr(X86::edx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister
    move(stackPointerRegister, X86::ecx);

    // edx currently points to the first argument, edx-sizeof(Register) points to 'this'
    loadPtr(Address(X86::edx, -(int32_t)sizeof(Register)), X86::edx);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::esi);

    move(callFrameRegister, X86::edi);

    call(Address(X86::esi, OBJECT_OFFSETOF(JSFunction, m_data)));

    addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
#elif PLATFORM(X86)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stackframe we set up for our
     * call to native code.  NativeCallFrameStructure describes the how we set up the stack
     * in advance of the call.  NativeFunctionCalleeSignature describes the callframe
     * as the native code expects it.  We do this as we are using the fastcall calling
     * convention which results in the callee popping its arguments off the stack, but
     * not the rest of the callframe so we need a nice way to ensure we increment the
     * stack pointer by the right amount after the call.
     */
#if COMPILER(MSVC) || PLATFORM(LINUX)
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#else
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in ECX
      //  JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif
    // Round the frame size up to a 16-byte boundary.
    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register)), regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue)));

#if COMPILER(MSVC) || PLATFORM(LINUX)
    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::eax);
    storePtr(X86::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86::edx);

    call(Address(X86::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type
    loadPtr(Address(X86::eax), X86::eax);
#else
    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::edx);

    // Plant callframe
    move(callFrameRegister, X86::ecx);
    call(Address(X86::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments
    // so pull them off now
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Handle an exception
    exceptionHandler.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    // Return into the VM throw trampoline instead of the caller.
    move(ImmPtr(reinterpret_cast<void*>(ctiVMThrowTrampoline)), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();


#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callArityCheck3, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callJSFunction3, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callDontLazyLinkCall, FunctionPtr(cti_vm_dontLazyLinkCall));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    *ctiVirtualCallPreLink = trampolineAt(finalCode, virtualCallPreLinkBegin);
    *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
    *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
}
    401865
    411866void JIT::emit_op_mov(Instruction* currentInstruction)
     
    631888{
    641889    if (m_codeBlock->needsFullScopeChain())
    65         JITStubCall(this, JITStubs::cti_op_end).call();
     1890        JITStubCall(this, cti_op_end).call();
    661891    ASSERT(returnValueRegister != callFrameRegister);
    671892    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
     
    951920        emitGetVirtualRegister(op1, regT0);
    961921        emitJumpSlowCaseIfNotImmediateInteger(regT0);
    97 #if USE(ALTERNATE_JSIMMEDIATE)
     1922#if USE(JSVALUE64)
    981923        int32_t op2imm = getConstantOperandImmediateInt(op2);
    991924#else
     
    1041929        emitGetVirtualRegister(op2, regT0);
    1051930        emitJumpSlowCaseIfNotImmediateInteger(regT0);
    106 #if USE(ALTERNATE_JSIMMEDIATE)
     1931#if USE(JSVALUE64)
    1071932        int32_t op1imm = getConstantOperandImmediateInt(op1);
    1081933#else
     
    1281953        emitGetVirtualRegister(op1, regT0);
    1291954        emitJumpSlowCaseIfNotImmediateInteger(regT0);
    130 #if USE(ALTERNATE_JSIMMEDIATE)
     1955#if USE(JSVALUE64)
    1311956        int32_t op2imm = getConstantOperandImmediateInt(op2);
    1321957#else
     
    1441969void JIT::emit_op_new_object(Instruction* currentInstruction)
    1451970{
    146     JITStubCall(this, JITStubs::cti_op_new_object).call(currentInstruction[1].u.operand);
     1971    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
    1471972}
    1481973
     
    1982023void JIT::emit_op_new_func(Instruction* currentInstruction)
    1992024{
    200     JITStubCall stubCall(this, JITStubs::cti_op_new_func);
     2025    JITStubCall stubCall(this, cti_op_new_func);
    2012026    stubCall.addArgument(ImmPtr(m_codeBlock->function(currentInstruction[2].u.operand)));
    2022027    stubCall.call(currentInstruction[1].u.operand);
     
    2152040void JIT::emit_op_load_varargs(Instruction* currentInstruction)
    2162041{
    217     JITStubCall stubCall(this, JITStubs::cti_op_load_varargs);
    218     stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    219     stubCall.call(currentInstruction[1].u.operand);
     2042    int argCountDst = currentInstruction[1].u.operand;
     2043    int argsOffset = currentInstruction[2].u.operand;
     2044
     2045    JITStubCall stubCall(this, cti_op_load_varargs);
     2046    stubCall.addArgument(Imm32(argsOffset));
     2047    stubCall.call();
     2048    // Stores a naked int32 in the register file.
     2049    store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
    2202050}
    2212051
     
    2742104void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
    2752105{
    276     JITStubCall stubCall(this, JITStubs::cti_op_tear_off_activation);
     2106    JITStubCall stubCall(this, cti_op_tear_off_activation);
    2772107    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    2782108    stubCall.call();
     
    2812111void JIT::emit_op_tear_off_arguments(Instruction*)
    2822112{
    283     JITStubCall(this, JITStubs::cti_op_tear_off_arguments).call();
     2113    JITStubCall(this, cti_op_tear_off_arguments).call();
    2842114}
    2852115
     
    2882118    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    2892119    if (m_codeBlock->needsFullScopeChain())
    290         JITStubCall(this, JITStubs::cti_op_ret_scopeChain).call();
     2120        JITStubCall(this, cti_op_ret_scopeChain).call();
    2912121
    2922122    ASSERT(callFrameRegister != regT1);
     
    3102140void JIT::emit_op_new_array(Instruction* currentInstruction)
    3112141{
    312     JITStubCall stubCall(this, JITStubs::cti_op_new_array);
     2142    JITStubCall stubCall(this, cti_op_new_array);
    3132143    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    3142144    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
     
    3182148void JIT::emit_op_resolve(Instruction* currentInstruction)
    3192149{
    320     JITStubCall stubCall(this, JITStubs::cti_op_resolve);
     2150    JITStubCall stubCall(this, cti_op_resolve);
    3212151    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    3222152    stubCall.call(currentInstruction[1].u.operand);
     
    3512181void JIT::emit_op_strcat(Instruction* currentInstruction)
    3522182{
    353     JITStubCall stubCall(this, JITStubs::cti_op_strcat);
     2183    JITStubCall stubCall(this, cti_op_strcat);
    3542184    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    3552185    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    3562186    stubCall.call(currentInstruction[1].u.operand);
    357 }
    358 
    359 void JIT::emit_op_resolve_func(Instruction* currentInstruction)
    360 {
    361     JITStubCall stubCall(this, JITStubs::cti_op_resolve_func);
    362     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    363     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    364     stubCall.call(currentInstruction[2].u.operand);
    3652187}
    3662188
     
    3822204void JIT::emit_op_resolve_base(Instruction* currentInstruction)
    3832205{
    384     JITStubCall stubCall(this, JITStubs::cti_op_resolve_base);
     2206    JITStubCall stubCall(this, cti_op_resolve_base);
    3852207    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    3862208    stubCall.call(currentInstruction[1].u.operand);
     
    3892211void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
    3902212{
    391     JITStubCall stubCall(this, JITStubs::cti_op_resolve_skip);
     2213    JITStubCall stubCall(this, cti_op_resolve_skip);
    3922214    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    3932215    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
     
    4202242    // Slow case
    4212243    noMatch.link(this);
    422     JITStubCall stubCall(this, JITStubs::cti_op_resolve_global);
     2244    JITStubCall stubCall(this, cti_op_resolve_global);
    4232245    stubCall.addArgument(ImmPtr(globalObject));
    4242246    stubCall.addArgument(ImmPtr(ident));
     
    5362358    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    5372359    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    538 #if USE(ALTERNATE_JSIMMEDIATE)
     2360#if USE(JSVALUE64)
    5392361    not32(regT0);
    5402362    emitFastArithIntToImmNoCheck(regT0, regT0);
     
    5472369void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
    5482370{
    549     JITStubCall stubCall(this, JITStubs::cti_op_resolve_with_base);
     2371    JITStubCall stubCall(this, cti_op_resolve_with_base);
    5502372    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    5512373    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
     
    5552377void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
    5562378{
    557     JITStubCall stubCall(this, JITStubs::cti_op_new_func_exp);
     2379    JITStubCall stubCall(this, cti_op_new_func_exp);
    5582380    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpression(currentInstruction[2].u.operand)));
    5592381    stubCall.call(currentInstruction[1].u.operand);
     
    5972419void JIT::emit_op_new_regexp(Instruction* currentInstruction)
    5982420{
    599     JITStubCall stubCall(this, JITStubs::cti_op_new_regexp);
     2421    JITStubCall stubCall(this, cti_op_new_regexp);
    6002422    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    6012423    stubCall.call(currentInstruction[1].u.operand);
     
    6122434void JIT::emit_op_throw(Instruction* currentInstruction)
    6132435{
    614     JITStubCall stubCall(this, JITStubs::cti_op_throw);
     2436    JITStubCall stubCall(this, cti_op_throw);
    6152437    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    6162438    stubCall.call();
     
    6252447void JIT::emit_op_next_pname(Instruction* currentInstruction)
    6262448{
    627     JITStubCall stubCall(this, JITStubs::cti_op_next_pname);
     2449    JITStubCall stubCall(this, cti_op_next_pname);
    6282450    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    6292451    stubCall.call();
     
    6362458void JIT::emit_op_push_scope(Instruction* currentInstruction)
    6372459{
    638     JITStubCall stubCall(this, JITStubs::cti_op_push_scope);
     2460    JITStubCall stubCall(this, cti_op_push_scope);
    6392461    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    6402462    stubCall.call(currentInstruction[1].u.operand);
     
    6432465void JIT::emit_op_pop_scope(Instruction*)
    6442466{
    645     JITStubCall(this, JITStubs::cti_op_pop_scope).call();
     2467    JITStubCall(this, cti_op_pop_scope).call();
     2468}
     2469
     2470void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
     2471{
     2472    unsigned dst = currentInstruction[1].u.operand;
     2473    unsigned src1 = currentInstruction[2].u.operand;
     2474    unsigned src2 = currentInstruction[3].u.operand;
     2475
     2476    emitGetVirtualRegisters(src1, regT0, src2, regT1);
     2477
     2478    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
     2479    move(regT0, regT2);
     2480    orPtr(regT1, regT2);
     2481    addSlowCase(emitJumpIfJSCell(regT2));
     2482    addSlowCase(emitJumpIfImmediateNumber(regT2));
     2483
     2484    if (type == OpStrictEq)
     2485        set32(Equal, regT1, regT0, regT0);
     2486    else
     2487        set32(NotEqual, regT1, regT0, regT0);
     2488    emitTagAsBoolImmediate(regT0);
     2489
     2490    emitPutVirtualRegister(dst);
    6462491}
    6472492
     
    6742519void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
    6752520{
    676     JITStubCall stubCall(this, JITStubs::cti_op_push_new_scope);
     2521    JITStubCall stubCall(this, cti_op_push_new_scope);
    6772522    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    6782523    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
     
    6892534void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
    6902535{
    691     JITStubCall stubCall(this, JITStubs::cti_op_jmp_scopes);
     2536    JITStubCall stubCall(this, cti_op_jmp_scopes);
    6922537    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    6932538    stubCall.call();
     
    7072552    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
    7082553
    709     JITStubCall stubCall(this, JITStubs::cti_op_switch_imm);
     2554    JITStubCall stubCall(this, cti_op_switch_imm);
    7102555    stubCall.addArgument(scrutinee, regT2);
    7112556    stubCall.addArgument(Imm32(tableIndex));
     
    7252570    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
    7262571
    727     JITStubCall stubCall(this, JITStubs::cti_op_switch_char);
     2572    JITStubCall stubCall(this, cti_op_switch_char);
    7282573    stubCall.addArgument(scrutinee, regT2);
    7292574    stubCall.addArgument(Imm32(tableIndex));
     
    7422587    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
    7432588
    744     JITStubCall stubCall(this, JITStubs::cti_op_switch_string);
     2589    JITStubCall stubCall(this, cti_op_switch_string);
    7452590    stubCall.addArgument(scrutinee, regT2);
    7462591    stubCall.addArgument(Imm32(tableIndex));
     
    7512596void JIT::emit_op_new_error(Instruction* currentInstruction)
    7522597{
    753     JITStubCall stubCall(this, JITStubs::cti_op_new_error);
     2598    JITStubCall stubCall(this, cti_op_new_error);
    7542599    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    7552600    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[3].u.operand))));
     
    7602605void JIT::emit_op_debug(Instruction* currentInstruction)
    7612606{
    762     JITStubCall stubCall(this, JITStubs::cti_op_debug);
     2607    JITStubCall stubCall(this, cti_op_debug);
    7632608    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    7642609    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
     
    8372682        emitInitRegister(j);
    8382683
    839     JITStubCall(this, JITStubs::cti_op_push_activation).call(currentInstruction[1].u.operand);
     2684    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    8402685}
    8412686
     
    8442689    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
    8452690    if (m_codeBlock->m_numParameters == 1)
    846         JITStubCall(this, JITStubs::cti_op_create_arguments_no_params).call();
     2691        JITStubCall(this, cti_op_create_arguments_no_params).call();
    8472692    else
    848         JITStubCall(this, JITStubs::cti_op_create_arguments).call();
     2693        JITStubCall(this, cti_op_create_arguments).call();
    8492694    argsCreated.link(this);
    8502695}
     
    8702715    Jump noProfiler = branchTestPtr(Zero, Address(regT1));
    8712716
    872     JITStubCall stubCall(this, JITStubs::cti_op_profile_will_call);
     2717    JITStubCall stubCall(this, cti_op_profile_will_call);
    8732718    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    8742719    stubCall.call();
     
    8822727    Jump noProfiler = branchTestPtr(Zero, Address(regT1));
    8832728
    884     JITStubCall stubCall(this, JITStubs::cti_op_profile_did_call);
     2729    JITStubCall stubCall(this, cti_op_profile_did_call);
    8852730    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    8862731    stubCall.call();
     
    8952740    linkSlowCase(iter);
    8962741    linkSlowCase(iter);
    897     JITStubCall stubCall(this, JITStubs::cti_op_convert_this);
     2742    JITStubCall stubCall(this, cti_op_convert_this);
    8982743    stubCall.addArgument(regT0);
    8992744    stubCall.call(currentInstruction[1].u.operand);
     
    9122757    linkSlowCase(iter);
    9132758
    914     JITStubCall stubCall(this, JITStubs::cti_op_to_primitive);
     2759    JITStubCall stubCall(this, cti_op_to_primitive);
    9152760    stubCall.addArgument(regT0);
    9162761    stubCall.call(currentInstruction[1].u.operand);
     
    9282773
    9292774    notImm.link(this);
    930     JITStubCall stubCall(this, JITStubs::cti_op_get_by_val);
     2775    JITStubCall stubCall(this, cti_op_get_by_val);
    9312776    stubCall.addArgument(regT0);
    9322777    stubCall.addArgument(regT1);
     
    9542799    if (isOperandConstantImmediateInt(op2)) {
    9552800        linkSlowCase(iter);
    956         JITStubCall stubCall(this, JITStubs::cti_op_loop_if_less);
     2801        JITStubCall stubCall(this, cti_op_loop_if_less);
    9572802        stubCall.addArgument(regT0);
    9582803        stubCall.addArgument(op2, regT2);
     
    9612806    } else if (isOperandConstantImmediateInt(op1)) {
    9622807        linkSlowCase(iter);
    963         JITStubCall stubCall(this, JITStubs::cti_op_loop_if_less);
     2808        JITStubCall stubCall(this, cti_op_loop_if_less);
    9642809        stubCall.addArgument(op1, regT2);
    9652810        stubCall.addArgument(regT0);
     
    9692814        linkSlowCase(iter);
    9702815        linkSlowCase(iter);
    971         JITStubCall stubCall(this, JITStubs::cti_op_loop_if_less);
     2816        JITStubCall stubCall(this, cti_op_loop_if_less);
    9722817        stubCall.addArgument(regT0);
    9732818        stubCall.addArgument(regT1);
     
    9832828    if (isOperandConstantImmediateInt(op2)) {
    9842829        linkSlowCase(iter);
    985         JITStubCall stubCall(this, JITStubs::cti_op_loop_if_lesseq);
     2830        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    9862831        stubCall.addArgument(regT0);
    9872832        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
     
    9912836        linkSlowCase(iter);
    9922837        linkSlowCase(iter);
    993         JITStubCall stubCall(this, JITStubs::cti_op_loop_if_lesseq);
     2838        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    9942839        stubCall.addArgument(regT0);
    9952840        stubCall.addArgument(regT1);
     
    10082853
    10092854    notImm.link(this); {
    1010         JITStubCall stubCall(this, JITStubs::cti_op_put_by_val);
     2855        JITStubCall stubCall(this, cti_op_put_by_val);
    10112856        stubCall.addArgument(regT0);
    10122857        stubCall.addArgument(regT1);
     
    10192864    linkSlowCase(iter);
    10202865    linkSlowCase(iter); {
    1021         JITStubCall stubCall(this, JITStubs::cti_op_put_by_val_array);
     2866        JITStubCall stubCall(this, cti_op_put_by_val_array);
    10222867        stubCall.addArgument(regT0);
    10232868        stubCall.addArgument(regT1);
     
    10302875{
    10312876    linkSlowCase(iter);
    1032     JITStubCall stubCall(this, JITStubs::cti_op_jtrue);
     2877    JITStubCall stubCall(this, cti_op_jtrue);
    10332878    stubCall.addArgument(regT0);
    10342879    stubCall.call();
     
    10402885    linkSlowCase(iter);
    10412886    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    1042     JITStubCall stubCall(this, JITStubs::cti_op_not);
     2887    JITStubCall stubCall(this, cti_op_not);
    10432888    stubCall.addArgument(regT0);
    10442889    stubCall.call(currentInstruction[1].u.operand);
     
    10482893{
    10492894    linkSlowCase(iter);
    1050     JITStubCall stubCall(this, JITStubs::cti_op_jtrue);
     2895    JITStubCall stubCall(this, cti_op_jtrue);
    10512896    stubCall.addArgument(regT0);
    10522897    stubCall.call();
     
    10572902{
    10582903    linkSlowCase(iter);
    1059     JITStubCall stubCall(this, JITStubs::cti_op_bitnot);
     2904    JITStubCall stubCall(this, cti_op_bitnot);
    10602905    stubCall.addArgument(regT0);
    10612906    stubCall.call(currentInstruction[1].u.operand);
     
    10652910{
    10662911    linkSlowCase(iter);
    1067     JITStubCall stubCall(this, JITStubs::cti_op_jtrue);
     2912    JITStubCall stubCall(this, cti_op_jtrue);
    10682913    stubCall.addArgument(regT0);
    10692914    stubCall.call();
     
    10742919{
    10752920    linkSlowCase(iter);
    1076     JITStubCall stubCall(this, JITStubs::cti_op_bitxor);
     2921    JITStubCall stubCall(this, cti_op_bitxor);
    10772922    stubCall.addArgument(regT0);
    10782923    stubCall.addArgument(regT1);
     
    10832928{
    10842929    linkSlowCase(iter);
    1085     JITStubCall stubCall(this, JITStubs::cti_op_bitor);
     2930    JITStubCall stubCall(this, cti_op_bitor);
    10862931    stubCall.addArgument(regT0);
    10872932    stubCall.addArgument(regT1);
     
    10922937{
    10932938    linkSlowCase(iter);
    1094     JITStubCall stubCall(this, JITStubs::cti_op_eq);
     2939    JITStubCall stubCall(this, cti_op_eq);
    10952940    stubCall.addArgument(regT0);
    10962941    stubCall.addArgument(regT1);
    1097     stubCall.call(currentInstruction[1].u.operand);
     2942    stubCall.call();
     2943    emitTagAsBoolImmediate(regT0);
     2944    emitPutVirtualRegister(currentInstruction[1].u.operand);
    10982945}
    10992946
     
    11012948{
    11022949    linkSlowCase(iter);
    1103     JITStubCall stubCall(this, JITStubs::cti_op_neq);
     2950    JITStubCall stubCall(this, cti_op_eq);
    11042951    stubCall.addArgument(regT0);
    11052952    stubCall.addArgument(regT1);
    1106     stubCall.call(currentInstruction[1].u.operand);
     2953    stubCall.call();
     2954    xor32(Imm32(0x1), regT0);
     2955    emitTagAsBoolImmediate(regT0);
     2956    emitPutVirtualRegister(currentInstruction[1].u.operand);
    11072957}
    11082958
     
    11112961    linkSlowCase(iter);
    11122962    linkSlowCase(iter);
    1113     JITStubCall stubCall(this, JITStubs::cti_op_stricteq);
     2963    JITStubCall stubCall(this, cti_op_stricteq);
    11142964    stubCall.addArgument(regT0);
    11152965    stubCall.addArgument(regT1);
     
    11212971    linkSlowCase(iter);
    11222972    linkSlowCase(iter);
    1123     JITStubCall stubCall(this, JITStubs::cti_op_nstricteq);
     2973    JITStubCall stubCall(this, cti_op_nstricteq);
    11242974    stubCall.addArgument(regT0);
    11252975    stubCall.addArgument(regT1);
     
    11342984    linkSlowCase(iter);
    11352985    linkSlowCase(iter);
    1136     JITStubCall stubCall(this, JITStubs::cti_op_instanceof);
     2986    JITStubCall stubCall(this, cti_op_instanceof);
    11372987    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    11382988    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
     
    11663016    linkSlowCase(iter);
    11673017
    1168     JITStubCall stubCall(this, JITStubs::cti_op_to_jsnumber);
     3018    JITStubCall stubCall(this, cti_op_to_jsnumber);
    11693019    stubCall.addArgument(regT0);
    11703020    stubCall.call(currentInstruction[1].u.operand);
    11713021}
    11723022
     3023#endif // USE(JSVALUE32_64)
    11733024
    11743025} // namespace JSC
  • trunk/JavaScriptCore/jit/JITPropertyAccess.cpp

    r46438 r46598  
    11/*
    2  * Copyright (C) 2008 Apple Inc. All rights reserved.
     2 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
    33 *
    44 * Redistribution and use in source and binary forms, with or without
     
    4848namespace JSC {
    4949
     50#if USE(JSVALUE32_64)
     51
     52void JIT::emit_op_put_by_index(Instruction* currentInstruction)
     53{
     54    unsigned base = currentInstruction[1].u.operand;
     55    unsigned property = currentInstruction[2].u.operand;
     56    unsigned value = currentInstruction[3].u.operand;
     57
     58    JITStubCall stubCall(this, cti_op_put_by_index);
     59    stubCall.addArgument(base);
     60    stubCall.addArgument(Imm32(property));
     61    stubCall.addArgument(value);
     62    stubCall.call();
     63}
     64
     65void JIT::emit_op_put_getter(Instruction* currentInstruction)
     66{
     67    unsigned base = currentInstruction[1].u.operand;
     68    unsigned property = currentInstruction[2].u.operand;
     69    unsigned function = currentInstruction[3].u.operand;
     70
     71    JITStubCall stubCall(this, cti_op_put_getter);
     72    stubCall.addArgument(base);
     73    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(property)));
     74    stubCall.addArgument(function);
     75    stubCall.call();
     76}
     77
     78void JIT::emit_op_put_setter(Instruction* currentInstruction)
     79{
     80    unsigned base = currentInstruction[1].u.operand;
     81    unsigned property = currentInstruction[2].u.operand;
     82    unsigned function = currentInstruction[3].u.operand;
     83
     84    JITStubCall stubCall(this, cti_op_put_setter);
     85    stubCall.addArgument(base);
     86    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(property)));
     87    stubCall.addArgument(function);
     88    stubCall.call();
     89}
     90
     91void JIT::emit_op_del_by_id(Instruction* currentInstruction)
     92{
     93    unsigned dst = currentInstruction[1].u.operand;
     94    unsigned base = currentInstruction[2].u.operand;
     95    unsigned property = currentInstruction[3].u.operand;
     96
     97    JITStubCall stubCall(this, cti_op_del_by_id);
     98    stubCall.addArgument(base);
     99    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(property)));
     100    stubCall.call(dst);
     101}
     102
     103
     104#if !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
     105
     106/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
     107
     108// Treat these as nops - the call will be handled as a regular get_by_id/op_call pair.
     109void JIT::emit_op_method_check(Instruction*) {}
     110void JIT::emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&) { ASSERT_NOT_REACHED(); }
     111#if ENABLE(JIT_OPTIMIZE_METHOD_CALLS)
     112#error "JIT_OPTIMIZE_METHOD_CALLS requires JIT_OPTIMIZE_PROPERTY_ACCESS"
     113#endif
     114
     115void JIT::emit_op_get_by_val(Instruction* currentInstruction)
     116{
     117    unsigned dst = currentInstruction[1].u.operand;
     118    unsigned base = currentInstruction[2].u.operand;
     119    unsigned property = currentInstruction[3].u.operand;
     120
     121    JITStubCall stubCall(this, cti_op_get_by_val);
     122    stubCall.addArgument(base);
     123    stubCall.addArgument(property);
     124    stubCall.call(dst);
     125}
     126
     127void JIT::emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&)
     128{
     129    ASSERT_NOT_REACHED();
     130}
     131
     132void JIT::emit_op_put_by_val(Instruction* currentInstruction)
     133{
     134    unsigned base = currentInstruction[1].u.operand;
     135    unsigned property = currentInstruction[2].u.operand;
     136    unsigned value = currentInstruction[3].u.operand;
     137
     138    JITStubCall stubCall(this, cti_op_put_by_val);
     139    stubCall.addArgument(base);
     140    stubCall.addArgument(property);
     141    stubCall.addArgument(value);
     142    stubCall.call();
     143}
     144
     145void JIT::emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&)
     146{
     147    ASSERT_NOT_REACHED();
     148}
     149
     150void JIT::emit_op_get_by_id(Instruction* currentInstruction)
     151{
     152    int dst = currentInstruction[1].u.operand;
     153    int base = currentInstruction[2].u.operand;
     154    int ident = currentInstruction[3].u.operand;
     155
     156    JITStubCall stubCall(this, cti_op_get_by_id_generic);
     157    stubCall.addArgument(base);
     158    stubCall.addArgument(ImmPtr(&(m_codeBlock->identifier(ident))));
     159    stubCall.call(dst);
     160
     161    m_propertyAccessInstructionIndex++;
     162}
     163
     164void JIT::emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&)
     165{
     166    m_propertyAccessInstructionIndex++;
     167    ASSERT_NOT_REACHED();
     168}
     169
     170void JIT::emit_op_put_by_id(Instruction* currentInstruction)
     171{
     172    int base = currentInstruction[1].u.operand;
     173    int ident = currentInstruction[2].u.operand;
     174    int value = currentInstruction[3].u.operand;
     175
     176    JITStubCall stubCall(this, cti_op_put_by_id_generic);
     177    stubCall.addArgument(base);
     178    stubCall.addArgument(ImmPtr(&(m_codeBlock->identifier(ident))));
     179    stubCall.addArgument(value);
     180    stubCall.call();
     181
     182    m_propertyAccessInstructionIndex++;
     183}
     184
     185void JIT::emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&)
     186{
     187    m_propertyAccessInstructionIndex++;
     188    ASSERT_NOT_REACHED();
     189}
     190
     191#else // !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
     192
     193/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
     194
     195#if ENABLE(JIT_OPTIMIZE_METHOD_CALLS)
     196
     197void JIT::emit_op_method_check(Instruction* currentInstruction)
     198{
     199    // Assert that the following instruction is a get_by_id.
     200    ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id);
     201
     202    currentInstruction += OPCODE_LENGTH(op_method_check);
     203
     204    // Do the method check - check the object & its prototype's structure inline (this is the common case).
     205    m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_propertyAccessInstructionIndex));
     206    MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
     207
     208    int dst = currentInstruction[1].u.operand;
     209    int base = currentInstruction[2].u.operand;
     210
     211    emitLoad(base, regT1, regT0);
     212    emitJumpSlowCaseIfNotJSCell(base, regT1);
     213
     214    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), info.structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
     215    DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(ImmPtr(0), regT2);
     216    Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), protoStructureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
     217
     218    // This will be relinked to load the function without doing a load.
     219    DataLabelPtr putFunction = moveWithPatch(ImmPtr(0), regT0);
     220    move(Imm32(JSValue::CellTag), regT1);
     221    Jump match = jump();
     222
     223    ASSERT(differenceBetween(info.structureToCompare, protoObj) == patchOffsetMethodCheckProtoObj);
     224    ASSERT(differenceBetween(info.structureToCompare, protoStructureToCompare) == patchOffsetMethodCheckProtoStruct);
     225    ASSERT(differenceBetween(info.structureToCompare, putFunction) == patchOffsetMethodCheckPutFunction);
     226
     227    // Link the failure cases here.
     228    structureCheck.link(this);
     229    protoStructureCheck.link(this);
     230
     231    // Do a regular(ish) get_by_id (the slow case will be linked to
     232    // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
     233    compileGetByIdHotPath();
     234
     235    match.link(this);
     236    emitStore(dst, regT1, regT0);
     237    map(m_bytecodeIndex + OPCODE_LENGTH(op_method_check), dst, regT1, regT0);
     238
     239    // We've already generated the following get_by_id, so make sure it's skipped over.
     240    m_bytecodeIndex += OPCODE_LENGTH(op_get_by_id);
     241}
     242
     243void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     244{
     245    currentInstruction += OPCODE_LENGTH(op_method_check);
     246
     247    int dst = currentInstruction[1].u.operand;
     248    int base = currentInstruction[2].u.operand;
     249    int ident = currentInstruction[3].u.operand;
     250
     251    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter, true);
     252
     253    // We've already generated the following get_by_id, so make sure it's skipped over.
     254    m_bytecodeIndex += OPCODE_LENGTH(op_get_by_id);
     255}
     256
     257#else //!ENABLE(JIT_OPTIMIZE_METHOD_CALLS)
     258
     259// Treat these as nops - the call will be handled as a regular get_by_id/op_call pair.
     260void JIT::emit_op_method_check(Instruction*) {}
     261void JIT::emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&) { ASSERT_NOT_REACHED(); }
     262
     263#endif
     264
     265void JIT::emit_op_get_by_val(Instruction* currentInstruction)
     266{
     267    unsigned dst = currentInstruction[1].u.operand;
     268    unsigned base = currentInstruction[2].u.operand;
     269    unsigned property = currentInstruction[3].u.operand;
     270   
     271    emitLoad2(base, regT1, regT0, property, regT3, regT2);
     272
     273    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
     274    emitJumpSlowCaseIfNotJSCell(base, regT1);
     275    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr)));
     276    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, OBJECT_OFFSETOF(JSArray, m_fastAccessCutoff))));
     277
     278    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT0);
     279    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4), regT1); // tag
     280    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), regT0); // payload
     281    emitStore(dst, regT1, regT0);
     282    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);
     283}
     284
     285void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     286{
     287    unsigned dst = currentInstruction[1].u.operand;
     288    unsigned base = currentInstruction[2].u.operand;
     289    unsigned property = currentInstruction[3].u.operand;
     290
     291    // The slow case that handles accesses to arrays (below) may jump back up to here.
     292    Label callGetByValJITStub(this);
     293
     294    linkSlowCase(iter); // property int32 check
     295    linkSlowCaseIfNotJSCell(iter, base); // base cell check
     296    linkSlowCase(iter); // base array check
     297
     298    JITStubCall stubCall(this, cti_op_get_by_val);
     299    stubCall.addArgument(base);
     300    stubCall.addArgument(property);
     301    stubCall.call(dst);
     302
     303    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
     304
     305    linkSlowCase(iter); // array fast cut-off check
     306
     307    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT0);
     308    branch32(AboveOrEqual, regT2, Address(regT0, OBJECT_OFFSETOF(ArrayStorage, m_vectorLength)), callGetByValJITStub);
     309
     310    // Missed the fast region, but it is still in the vector.
     311    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4), regT1); // tag
     312    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), regT0); // payload
     313
     314    // FIXME: Maybe we can optimize this comparison to JSValue().
     315    Jump skip = branch32(NotEqual, regT0, Imm32(0));
     316    branch32(Equal, regT1, Imm32(JSValue::CellTag), callGetByValJITStub);
     317
     318    skip.link(this);
     319    emitStore(dst, regT1, regT0);
     320}
     321
     322void JIT::emit_op_put_by_val(Instruction* currentInstruction)
     323{
     324    unsigned base = currentInstruction[1].u.operand;
     325    unsigned property = currentInstruction[2].u.operand;
     326    unsigned value = currentInstruction[3].u.operand;
     327
     328    emitLoad2(base, regT1, regT0, property, regT3, regT2);
     329
     330    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
     331    emitJumpSlowCaseIfNotJSCell(base, regT1);
     332    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr)));
     333    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT3);
     334
     335    Jump inFastVector = branch32(Below, regT2, Address(regT0, OBJECT_OFFSETOF(JSArray, m_fastAccessCutoff)));
     336
     337    // Check if the access is within the vector.
     338    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_vectorLength))));
     339
     340    // This is a write to the slow part of the vector; first, we have to check if this would be the first write to this location.
     341    // FIXME: should be able to handle initial write to array; increment the number of items in the array, and potentially update fast access cutoff.
     342    Jump skip = branch32(NotEqual, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4), Imm32(JSValue::CellTag));
     343    addSlowCase(branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), Imm32(0)));
     344    skip.link(this);
     345
     346    inFastVector.link(this);
     347
     348    emitLoad(value, regT1, regT0);
     349    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]))); // payload
     350    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4)); // tag
     351}
     352
     353void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     354{
     355    unsigned base = currentInstruction[1].u.operand;
     356    unsigned property = currentInstruction[2].u.operand;
     357    unsigned value = currentInstruction[3].u.operand;
     358
     359    linkSlowCase(iter); // property int32 check
     360    linkSlowCaseIfNotJSCell(iter, base); // base cell check
     361    linkSlowCase(iter); // base not array check
     362
     363    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
     364    stubPutByValCall.addArgument(base);
     365    stubPutByValCall.addArgument(property);
     366    stubPutByValCall.addArgument(value);
     367    stubPutByValCall.call();
     368
     369    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
     370
     371    // Slow cases for immediate int accesses to arrays.
     372    linkSlowCase(iter); // in vector check
     373    linkSlowCase(iter); // written to slot check
     374
     375    JITStubCall stubCall(this, cti_op_put_by_val_array);
     376    stubCall.addArgument(regT1, regT0);
     377    stubCall.addArgument(regT2);
     378    stubCall.addArgument(value);
     379    stubCall.call();
     380}
     381
     382void JIT::emit_op_get_by_id(Instruction* currentInstruction)
     383{
     384    int dst = currentInstruction[1].u.operand;
     385    int base = currentInstruction[2].u.operand;
     386   
     387    emitLoad(base, regT1, regT0);
     388    emitJumpSlowCaseIfNotJSCell(base, regT1);
     389    compileGetByIdHotPath();
     390    emitStore(dst, regT1, regT0);
     391    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
     392}
     393
     394void JIT::compileGetByIdHotPath()
     395{
     396    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
     397    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
     398    // to array-length / prototype access trampolines, and finally we also record the property-map access offset as a label
     399    // to jump back to if one of these trampolines finds a match).
     400    Label hotPathBegin(this);
     401    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;
     402    m_propertyAccessInstructionIndex++;
     403
     404    DataLabelPtr structureToCompare;
     405    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
     406    addSlowCase(structureCheck);
     407    ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetGetByIdStructure);
     408    ASSERT(differenceBetween(hotPathBegin, structureCheck) == patchOffsetGetByIdBranchToSlowCase);
     409
     410    Label externalLoad = loadPtrWithPatchToLEA(Address(regT0, OBJECT_OFFSETOF(JSObject, m_externalStorage)), regT2);
     411    Label externalLoadComplete(this);
     412    ASSERT(differenceBetween(hotPathBegin, externalLoad) == patchOffsetGetByIdExternalLoad);
     413    ASSERT(differenceBetween(externalLoad, externalLoadComplete) == patchLengthGetByIdExternalLoad);
     414
     415    DataLabel32 displacementLabel1 = loadPtrWithAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
     416    ASSERT(differenceBetween(hotPathBegin, displacementLabel1) == patchOffsetGetByIdPropertyMapOffset1);
     417    DataLabel32 displacementLabel2 = loadPtrWithAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag
     418    ASSERT(differenceBetween(hotPathBegin, displacementLabel2) == patchOffsetGetByIdPropertyMapOffset2);
     419
     420    Label putResult(this);
     421    ASSERT(differenceBetween(hotPathBegin, putResult) == patchOffsetGetByIdPutResult);
     422}
     423
     424void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
     425{
     426    int dst = currentInstruction[1].u.operand;
     427    int base = currentInstruction[2].u.operand;
     428    int ident = currentInstruction[3].u.operand;
     429
     430    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
     431}
     432
     433void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
     434{
     435    // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
     436    // so that we only need to track one pointer into the slow case code - we track a pointer to the location
     437    // of the call (which we can use to look up the patch information), but should an array-length or
     438    // prototype access trampoline fail we want to bail out back to here.  To do so we can subtract back
     439    // the distance from the call to the head of the slow case.
     440    linkSlowCaseIfNotJSCell(iter, base);
     441    linkSlowCase(iter);
     442
     443    Label coldPathBegin(this);
     444
     445    JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
     446    stubCall.addArgument(regT1, regT0);
     447    stubCall.addArgument(ImmPtr(ident));
     448    Call call = stubCall.call(dst);
     449
     450    ASSERT(differenceBetween(coldPathBegin, call) == patchOffsetGetByIdSlowCaseCall);
     451
     452    // Track the location of the call; this will be used to recover patch information.
     453    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].callReturnLocation = call;
     454    m_propertyAccessInstructionIndex++;
     455}
     456
         // Hot path for op_put_by_id (JSVALUE32_64): emits a patchable structure check,
         // a patchable external-storage load, and patchable payload/tag stores.  Every
         // patchable site's distance from hotPathBegin is asserted against the fixed
         // patchOffsetPutById* constants used by the repatching code.
      457void JIT::emit_op_put_by_id(Instruction* currentInstruction)
      458{
      459    // In order to be able to patch both the Structure, and the object offset, we store one pointer,
      460    // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
      461    // such that the Structure & offset are always at the same distance from this.
      462
      463    int base = currentInstruction[1].u.operand;
      464    int value = currentInstruction[3].u.operand;
      465
             // base -> (regT1 tag, regT0 payload); value -> (regT3 tag, regT2 payload).
      466    emitLoad2(base, regT1, regT0, value, regT3, regT2);
      467
      468    emitJumpSlowCaseIfNotJSCell(base, regT1);
      469
      470    Label hotPathBegin(this);
      471    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;
      472    m_propertyAccessInstructionIndex++;
      473
      474    // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
      475    DataLabelPtr structureToCompare;
      476    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
      477    ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetPutByIdStructure);
      478
      479    // Plant a load from a bogus offset in the object's property map; we will patch this later, if it is to be used.
      480    Label externalLoad = loadPtrWithPatchToLEA(Address(regT0, OBJECT_OFFSETOF(JSObject, m_externalStorage)), regT0);
      481    Label externalLoadComplete(this);
      482    ASSERT(differenceBetween(hotPathBegin, externalLoad) == patchOffsetPutByIdExternalLoad);
      483    ASSERT(differenceBetween(externalLoad, externalLoadComplete) == patchLengthPutByIdExternalLoad);
      484
      485    DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT0, patchGetByIdDefaultOffset)); // payload
      486    DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT0, patchGetByIdDefaultOffset)); // tag
      487    ASSERT(differenceBetween(hotPathBegin, displacementLabel1) == patchOffsetPutByIdPropertyMapOffset1);
      488    ASSERT(differenceBetween(hotPathBegin, displacementLabel2) == patchOffsetPutByIdPropertyMapOffset2);
      489}
     490
         // Slow path for op_put_by_id: call the C++ stub with (base, ident, value) and
         // record the call site so the hot path above can be repatched later.
      491void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
      492{
      493    int base = currentInstruction[1].u.operand;
      494    int ident = currentInstruction[2].u.operand;
      495
      496    linkSlowCaseIfNotJSCell(iter, base);
      497    linkSlowCase(iter);
      498
      499    JITStubCall stubCall(this, cti_op_put_by_id);
      500    stubCall.addArgument(regT1, regT0); // base (tag, payload) — loaded by emit_op_put_by_id
      501    stubCall.addArgument(ImmPtr(&(m_codeBlock->identifier(ident))));
      502    stubCall.addArgument(regT3, regT2); // value (tag, payload)
      503    Call call = stubCall.call();
      504
      505    // Track the location of the call; this will be used to recover patch information.
      506    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].callReturnLocation = call;
      507    m_propertyAccessInstructionIndex++;
      508}
     509
     510// Compile a store into an object's property storage.  May overwrite base.
     511void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, Structure* structure, size_t cachedOffset)
     512{
     513    int offset = cachedOffset;
     514    if (structure->isUsingInlineStorage())
     515        offset += OBJECT_OFFSETOF(JSObject, m_inlineStorage) /  sizeof(Register);
     516    else
     517        loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), base);
     518    emitStore(offset, valueTag, valuePayload, base);
     519}
     520
     521// Compile a load from an object's property storage.  May overwrite base.
     522void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, Structure* structure, size_t cachedOffset)
     523{
     524    int offset = cachedOffset;
     525    if (structure->isUsingInlineStorage())
     526        offset += OBJECT_OFFSETOF(JSObject, m_inlineStorage) / sizeof(Register);
     527    else
     528        loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), base);
     529    emitLoad(offset, resultTag, resultPayload, base);
     530}
     531
     532void JIT::compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset)
     533{
     534    if (base->isUsingInlineStorage()) {
     535        load32(reinterpret_cast<char*>(&base->m_inlineStorage[cachedOffset]), resultPayload);
     536        load32(reinterpret_cast<char*>(&base->m_inlineStorage[cachedOffset]) + 4, resultTag);
     537        return;
     538    }
     539
     540    size_t offset = cachedOffset * sizeof(JSValue);
     541
     542    PropertyStorage* protoPropertyStorage = &base->m_externalStorage;
     543    loadPtr(static_cast<void*>(protoPropertyStorage), temp);
     544    load32(Address(temp, offset), resultPayload);
     545    load32(Address(temp, offset + 4), resultTag);
     546}
     547
     548void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress)
     549{
     550    // It is assumed that regT0 contains the basePayload and regT1 contains the baseTag.  The value can be found on the stack.
     551
     552    JumpList failureCases;
     553    failureCases.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
     554
     555    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
     556    failureCases.append(branchPtr(NotEqual, regT2, ImmPtr(oldStructure)));
     557
     558    // Verify that nothing in the prototype chain has a setter for this property.
     559    for (RefPtr<Structure>* it = chain->head(); *it; ++it) {
     560        loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
     561        loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
     562        failureCases.append(branchPtr(NotEqual, regT2, ImmPtr(it->get())));
     563    }
     564
     565    // Reallocate property storage if needed.
     566    Call callTarget;
     567    bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
     568    if (willNeedStorageRealloc) {
     569        // This trampoline was called to like a JIT stub; before we can can call again we need to
     570        // remove the return address from the stack, to prevent the stack from becoming misaligned.
     571        preserveReturnAddressAfterCall(regT3);
     572 
     573        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
     574        stubCall.skipArgument(); // base
     575        stubCall.skipArgument(); // ident
     576        stubCall.skipArgument(); // value
     577        stubCall.addArgument(Imm32(oldStructure->propertyStorageCapacity()));
     578        stubCall.addArgument(Imm32(newStructure->propertyStorageCapacity()));
     579        stubCall.call(regT0);
     580
     581        restoreReturnAddressBeforeReturn(regT3);
     582    }
     583
     584    sub32(Imm32(1), AbsoluteAddress(oldStructure->addressOfCount()));
     585    add32(Imm32(1), AbsoluteAddress(newStructure->addressOfCount()));
     586    storePtr(ImmPtr(newStructure), Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)));
     587 
     588    load32(Address(stackPointerRegister, offsetof(struct JITStackFrame, args[2]) + sizeof(void*)), regT3);
     589    load32(Address(stackPointerRegister, offsetof(struct JITStackFrame, args[2]) + sizeof(void*) + 4), regT2);
     590
     591    // Write the value
     592    compilePutDirectOffset(regT0, regT2, regT3, newStructure, cachedOffset);
     593
     594    ret();
     595   
     596    ASSERT(!failureCases.empty());
     597    failureCases.link(this);
     598    restoreArgumentReferenceForTrampoline();
     599    Call failureCall = tailRecursiveCall();
     600
     601    LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
     602
     603    patchBuffer.link(failureCall, FunctionPtr(cti_op_put_by_id_fail));
     604
     605    if (willNeedStorageRealloc) {
     606        ASSERT(m_calls.size() == 1);
     607        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
     608    }
     609   
     610    CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
     611    stubInfo->stubRoutine = entryLabel;
     612    RepatchBuffer repatchBuffer(m_codeBlock);
     613    repatchBuffer.relinkCallerToTrampoline(returnAddress, entryLabel);
     614}
     615
         // Repatch a get_by_id hot path into a self-access cache: plant the expected
         // Structure and the property's payload/tag displacements at the offsets the
         // hot path reserved, and route future slow-path calls to the self-fail stub.
      616void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
      617{
      618    RepatchBuffer repatchBuffer(codeBlock);
      619
      620    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
      621    // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
      622    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
      623
      624    int offset = sizeof(JSValue) * cachedOffset;
      625
      626    // If we're patching to use inline storage, convert the initial load to a lea; this avoids the extra load
      627    // and makes the subsequent load's offset automatically correct
      628    if (structure->isUsingInlineStorage())
      629        repatchBuffer.repatchLoadPtrToLEA(stubInfo->hotPathBegin.instructionAtOffset(patchOffsetGetByIdExternalLoad));
      630
      631    // Patch the offset into the property map to load from, then patch the Structure to look for.
      632    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), structure);
      633    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset1), offset); // payload
      634    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset2), offset + 4); // tag
      635}
     636
         // Cache a method-check result: remember (and ref) the base and prototype
         // Structures in the link info, then patch the inline check with the expected
         // Structure, prototype object, prototype Structure and cached callee.
      637void JIT::patchMethodCallProto(CodeBlock* codeBlock, MethodCallLinkInfo& methodCallLinkInfo, JSFunction* callee, Structure* structure, JSObject* proto)
      638{
      639    RepatchBuffer repatchBuffer(codeBlock);
      640
                 // Each method-check site is patched at most once (asserted below).
      641    ASSERT(!methodCallLinkInfo.cachedStructure);
      642    methodCallLinkInfo.cachedStructure = structure;
      643    structure->ref();
      644
      645    Structure* prototypeStructure = proto->structure();
      646    ASSERT(!methodCallLinkInfo.cachedPrototypeStructure);
      647    methodCallLinkInfo.cachedPrototypeStructure = prototypeStructure;
      648    prototypeStructure->ref();
      649
      650    repatchBuffer.repatch(methodCallLinkInfo.structureLabel, structure);
      651    repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoObj), proto);
      652    repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoStruct), prototypeStructure);
      653    repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckPutFunction), callee);
      654}
     655
         // Repatch a put_by_id hot path into a replace cache (no Structure transition):
         // plant the expected Structure and payload/tag store displacements, and route
         // future slow-path calls to the generic stub.
      656void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
      657{
      658    RepatchBuffer repatchBuffer(codeBlock);
      659
      660    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
      661    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
      662    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_put_by_id_generic));
      663
      664    int offset = sizeof(JSValue) * cachedOffset;
      665
      666    // If we're patching to use inline storage, convert the initial load to a lea; this avoids the extra load
      667    // and makes the subsequent load's offset automatically correct
      668    if (structure->isUsingInlineStorage())
      669        repatchBuffer.repatchLoadPtrToLEA(stubInfo->hotPathBegin.instructionAtOffset(patchOffsetPutByIdExternalLoad));
      670
      671    // Patch the offset into the property map to load from, then patch the Structure to look for.
      672    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), structure);
      673    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset1), offset); // payload
      674    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset2), offset + 4); // tag
      675}
     676
         // Generate a stub that returns an array's length for a get_by_id of "length":
         // vptr-checks that the cell is a JSArray, loads m_length, guards it fits in an
         // int32, and returns it as an Int32-tagged value.  Failures jump back to the
         // original slow case; the hot path's slow-case branch is relinked to this stub.
      677void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
      678{
      679    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
      680   
      681    // regT0 holds a JSCell*
      682
      683    // Check for array
      684    Jump failureCases1 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr));
      685
      686    // Checks out okay! - get the length from the storage
      687    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT2);
      688    load32(Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
      689
                 // Lengths above INT_MAX cannot be represented as an Int32-tagged immediate.
      690    Jump failureCases2 = branch32(Above, regT2, Imm32(INT_MAX));
      691    move(regT2, regT0);
      692    move(Imm32(JSValue::Int32Tag), regT1);
      693    Jump success = jump();
      694
      695    LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
      696
      697    // Use the patch information to link the failure cases back to the original slow case routine.
      698    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
      699    patchBuffer.link(failureCases1, slowCaseBegin);
      700    patchBuffer.link(failureCases2, slowCaseBegin);
      701
      702    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
      703    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
      704
      705    // Track the stub we have created so that it will be deleted later.
      706    CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
      707    stubInfo->stubRoutine = entryLabel;
      708
      709    // Finally patch the jump to slow case back in the hot path to jump here instead.
      710    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
      711    RepatchBuffer repatchBuffer(m_codeBlock);
      712    repatchBuffer.relink(jumpLocation, entryLabel);
      713
      714    // We don't want to patch more than once - route future slow-path calls to the array-fail stub.
      715    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
      716}
     717
         // Generate a monomorphic stub for a get_by_id found on the direct prototype:
         // checks the base's Structure and the prototype's Structure, then loads the
         // property straight out of the (compile-time-known) prototype object.
      718void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
      719{
      720    // regT0 holds a JSCell*
      721
      722    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
      723    // referencing the prototype object - let's speculatively load its table nice and early!)
      724    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
      725
      726    Jump failureCases1 = checkStructure(regT0, structure);
      727
      728    // Check the prototype object's Structure had not changed.
      729    Structure** prototypeStructureAddress = &(protoObject->m_structure);
      730#if PLATFORM(X86_64)
                 // x86-64 cannot compare memory against a 64-bit immediate directly; stage it in regT3.
      731    move(ImmPtr(prototypeStructure), regT3);
      732    Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3);
      733#else
      734    Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
      735#endif
      736
      737    // Checks out okay! - getDirectOffset
      738    compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
      739
      740    Jump success = jump();
      741
      742    LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
      743
      744    // Use the patch information to link the failure cases back to the original slow case routine.
      745    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
      746    patchBuffer.link(failureCases1, slowCaseBegin);
      747    patchBuffer.link(failureCases2, slowCaseBegin);
      748
      749    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
      750    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
      751
      752    // Track the stub we have created so that it will be deleted later.
      753    CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
      754    stubInfo->stubRoutine = entryLabel;
      755
      756    // Finally patch the jump to slow case back in the hot path to jump here instead.
      757    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
      758    RepatchBuffer repatchBuffer(m_codeBlock);
      759    repatchBuffer.relink(jumpLocation, entryLabel);
      760
      761    // Relink the caller so subsequent misses build a polymorphic proto list instead of repatching here.
      762    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
      763}
     764
     765
         // Add one self-access case to a polymorphic get_by_id stub list: check the
         // Structure, load the property from the base itself, and chain the failure
         // case to the previous list entry (or the slow case for the first entry).
      766void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
      767{
      768    // regT0 holds a JSCell*
      769   
      770    Jump failureCase = checkStructure(regT0, structure);
      771    compileGetDirectOffset(regT0, regT1, regT0, structure, cachedOffset);
      772    Jump success = jump();
      773
      774    LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
      775
      776    // Use the patch information to link the failure cases back to the original slow case routine.
      777    CodeLocationLabel lastProtoBegin = polymorphicStructures->list[currentIndex - 1].stubRoutine;
      778    if (!lastProtoBegin)
      779        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
      780
      781    patchBuffer.link(failureCase, lastProtoBegin);
      782
      783    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
      784    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
      785
      786    CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
      787
                 // The list takes a reference to the Structure it now guards against.
      788    structure->ref();
      789    polymorphicStructures->list[currentIndex].set(entryLabel, structure);
      790
      791    // Finally patch the jump to slow case back in the hot path to jump here instead.
      792    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
      793    RepatchBuffer repatchBuffer(m_codeBlock);
      794    repatchBuffer.relink(jumpLocation, entryLabel);
      795}
     796
         // Add one direct-prototype case to a polymorphic get_by_id stub list: check the
         // base and prototype Structures, load from the known prototype object, and chain
         // failures to the previous list entry.
      797void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame)
      798{
      799    // regT0 holds a JSCell*
      800   
      801    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
      802    // referencing the prototype object - let's speculatively load its table nice and early!)
      803    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
      804
      805    // Check eax is an object of the right Structure.
      806    Jump failureCases1 = checkStructure(regT0, structure);
      807
      808    // Check the prototype object's Structure had not changed.
      809    Structure** prototypeStructureAddress = &(protoObject->m_structure);
      810#if PLATFORM(X86_64)
                 // x86-64 cannot compare memory against a 64-bit immediate directly; stage it in regT3.
      811    move(ImmPtr(prototypeStructure), regT3);
      812    Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3);
      813#else
      814    Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
      815#endif
      816
      817    compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
      818
      819    Jump success = jump();
      820
      821    LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
      822
      823    // Use the patch information to link the failure cases back to the original slow case routine.
      824    CodeLocationLabel lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
      825    patchBuffer.link(failureCases1, lastProtoBegin);
      826    patchBuffer.link(failureCases2, lastProtoBegin);
      827
      828    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
      829    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
      830
      831    CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
      832
                 // The list takes references to both Structures it now guards against.
      833    structure->ref();
      834    prototypeStructure->ref();
      835    prototypeStructures->list[currentIndex].set(entryLabel, structure, prototypeStructure);
      836
      837    // Finally patch the jump to slow case back in the hot path to jump here instead.
      838    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
      839    RepatchBuffer repatchBuffer(m_codeBlock);
      840    repatchBuffer.relink(jumpLocation, entryLabel);
      841}
     842
         // Add one prototype-chain case to a polymorphic get_by_id stub list: walk the
         // chain of `count` prototypes, Structure-checking each one, then load from the
         // final (compile-time-known) prototype object.  Failures chain to the previous
         // list entry.
      843void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame)
      844{
      845    // regT0 holds a JSCell*
      846   
      847    ASSERT(count);
      848   
      849    JumpList bucketsOfFail;
      850
      851    // Check eax is an object of the right Structure.
      852    bucketsOfFail.append(checkStructure(regT0, structure));
      853
      854    Structure* currStructure = structure;
      855    RefPtr<Structure>* chainEntries = chain->head();
      856    JSObject* protoObject = 0;
      857    for (unsigned i = 0; i < count; ++i) {
      858        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
      859        currStructure = chainEntries[i].get();
      860
      861        // Check the prototype object's Structure had not changed.
      862        Structure** prototypeStructureAddress = &(protoObject->m_structure);
      863#if PLATFORM(X86_64)
                     // x86-64 cannot compare memory against a 64-bit immediate directly; stage it in regT3.
      864        move(ImmPtr(currStructure), regT3);
      865        bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3));
      866#else
      867        bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
      868#endif
      869    }
      870    ASSERT(protoObject);
      871
      872    compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
      873    Jump success = jump();
      874
      875    LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
      876
      877    // Use the patch information to link the failure cases back to the original slow case routine.
      878    CodeLocationLabel lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
      879
      880    patchBuffer.link(bucketsOfFail, lastProtoBegin);
      881
      882    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
      883    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
      884
      885    CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
      886
      887    // Track the stub we have created so that it will be deleted later.
      888    structure->ref();
      889    chain->ref();
      890    prototypeStructures->list[currentIndex].set(entryLabel, structure, chain);
      891
      892    // Finally patch the jump to slow case back in the hot path to jump here instead.
      893    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
      894    RepatchBuffer repatchBuffer(m_codeBlock);
      895    repatchBuffer.relink(jumpLocation, entryLabel);
      896}
     897
         // Generate a monomorphic stub for a get_by_id found further up the prototype
         // chain: Structure-check the base and each of the `count` prototypes, then load
         // from the final (compile-time-known) prototype object.  Failures go back to the
         // original slow case.
      898void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
      899{
      900    // regT0 holds a JSCell*
      901   
      902    ASSERT(count);
      903   
      904    JumpList bucketsOfFail;
      905
      906    // Check eax is an object of the right Structure.
      907    bucketsOfFail.append(checkStructure(regT0, structure));
      908
      909    Structure* currStructure = structure;
      910    RefPtr<Structure>* chainEntries = chain->head();
      911    JSObject* protoObject = 0;
      912    for (unsigned i = 0; i < count; ++i) {
      913        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
      914        currStructure = chainEntries[i].get();
      915
      916        // Check the prototype object's Structure had not changed.
      917        Structure** prototypeStructureAddress = &(protoObject->m_structure);
      918#if PLATFORM(X86_64)
                     // x86-64 cannot compare memory against a 64-bit immediate directly; stage it in regT3.
      919        move(ImmPtr(currStructure), regT3);
      920        bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3));
      921#else
      922        bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
      923#endif
      924    }
      925    ASSERT(protoObject);
      926
      927    compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
      928    Jump success = jump();
      929
      930    LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
      931
      932    // Use the patch information to link the failure cases back to the original slow case routine.
      933    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
      934
      935    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
      936    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
      937
      938    // Track the stub we have created so that it will be deleted later.
      939    CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
      940    stubInfo->stubRoutine = entryLabel;
      941
      942    // Finally patch the jump to slow case back in the hot path to jump here instead.
      943    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
      944    RepatchBuffer repatchBuffer(m_codeBlock);
      945    repatchBuffer.relink(jumpLocation, entryLabel);
      946
      947    // We don't want to patch more than once - route future misses to the proto-list builder.
      948    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
      949}
     950
     951/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
     952
     953#endif // !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
     954
     955#else // USE(JSVALUE32_64)
     956
    50957void JIT::emit_op_get_by_val(Instruction* currentInstruction)
    51958{
    52959    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    53960    emitJumpSlowCaseIfNotImmediateInteger(regT1);
    54 #if USE(ALTERNATE_JSIMMEDIATE)
     961#if USE(JSVALUE64)
    55962    // This is technically incorrect - we're zero-extending an int32.  On the hot path this doesn't matter.
    56963    // We check the value as if it was a uint32 against the m_fastAccessCutoff - which will always fail if
     
    79986    emitGetVirtualRegisters(currentInstruction[1].u.operand, regT0, currentInstruction[2].u.operand, regT1);
    80987    emitJumpSlowCaseIfNotImmediateInteger(regT1);
    81 #if USE(ALTERNATE_JSIMMEDIATE)
     988#if USE(JSVALUE64)
    82989    // See comment in op_get_by_val.
    83990    zeroExtend32ToPtr(regT1, regT1);
     
    1061013void JIT::emit_op_put_by_index(Instruction* currentInstruction)
    1071014{
    108     JITStubCall stubCall(this, JITStubs::cti_op_put_by_index);
     1015    JITStubCall stubCall(this, cti_op_put_by_index);
    1091016    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    1101017    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
     
    1151022void JIT::emit_op_put_getter(Instruction* currentInstruction)
    1161023{
    117     JITStubCall stubCall(this, JITStubs::cti_op_put_getter);
     1024    JITStubCall stubCall(this, cti_op_put_getter);
    1181025    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    1191026    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
     
    1241031void JIT::emit_op_put_setter(Instruction* currentInstruction)
    1251032{
    126     JITStubCall stubCall(this, JITStubs::cti_op_put_setter);
     1033    JITStubCall stubCall(this, cti_op_put_setter);
    1271034    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    1281035    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
     
    1331040void JIT::emit_op_del_by_id(Instruction* currentInstruction)
    1341041{
    135     JITStubCall stubCall(this, JITStubs::cti_op_del_by_id);
     1042    JITStubCall stubCall(this, cti_op_del_by_id);
    1361043    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    1371044    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
     
    1581065
    1591066    emitGetVirtualRegister(baseVReg, regT0);
    160     JITStubCall stubCall(this, JITStubs::cti_op_get_by_id_generic);
     1067    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    1611068    stubCall.addArgument(regT0);
    1621069    stubCall.addArgument(ImmPtr(ident));
     
    1791086    emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);
    1801087
    181     JITStubCall stubCall(this, JITStubs::cti_op_put_by_id_generic);
     1088    JITStubCall stubCall(this, cti_op_put_by_id_generic);
    1821089    stubCall.addArgument(regT0);
    1831090    stubCall.addArgument(ImmPtr(ident));
     
    2501157    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
    2511158
    252     compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, m_propertyAccessInstructionIndex++, true);
     1159    compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, true);
    2531160
    2541161    // We've already generated the following get_by_id, so make sure it's skipped over.
     
    3111218    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
    3121219
    313     compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, m_propertyAccessInstructionIndex++, false);
    314 }
    315 
    316 void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex, bool isMethodCheck)
     1220    compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, false);
     1221}
     1222
     1223void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
    3171224{
    3181225    // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
     
    3281235    Label coldPathBegin(this);
    3291236#endif
    330     JITStubCall stubCall(this, isMethodCheck ? JITStubs::cti_op_get_by_id_method_check : JITStubs::cti_op_get_by_id);
     1237    JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
    3311238    stubCall.addArgument(regT0);
    3321239    stubCall.addArgument(ImmPtr(ident));
     
    3361243
    3371244    // Track the location of the call; this will be used to recover patch information.
    338     m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].callReturnLocation = call;
     1245    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].callReturnLocation = call;
     1246    m_propertyAccessInstructionIndex++;
    3391247}
    3401248
     
    3831291    linkSlowCase(iter);
    3841292
    385     JITStubCall stubCall(this, JITStubs::cti_op_put_by_id);
     1293    JITStubCall stubCall(this, cti_op_put_by_id);
    3861294    stubCall.addArgument(regT0);
    3871295    stubCall.addArgument(ImmPtr(ident));
     
    4661374        preserveReturnAddressAfterCall(regT3);
    4671375 
    468         JITStubCall stubCall(this, JITStubs::cti_op_put_by_id_transition_realloc);
    469         stubCall.addArgument(regT0);
     1376        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
     1377        stubCall.skipArgument(); // base
     1378        stubCall.skipArgument(); // ident
     1379        stubCall.skipArgument(); // value
    4701380        stubCall.addArgument(Imm32(oldStructure->propertyStorageCapacity()));
    4711381        stubCall.addArgument(Imm32(newStructure->propertyStorageCapacity()));
    472         stubCall.addArgument(regT1); // This argument is not used in the stub; we set it up on the stack so that it can be restored, below.
    4731382        stubCall.call(regT0);
    474         emitGetJITStubArg(4, regT1);
     1383        emitGetJITStubArg(3, regT1);
    4751384
    4761385        restoreReturnAddressBeforeReturn(regT3);
     
    4951404    LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
    4961405
    497     patchBuffer.link(failureCall, FunctionPtr(JITStubs::cti_op_put_by_id_fail));
     1406    patchBuffer.link(failureCall, FunctionPtr(cti_op_put_by_id_fail));
    4981407
    4991408    if (willNeedStorageRealloc) {
    5001409        ASSERT(m_calls.size() == 1);
    501         patchBuffer.link(m_calls[0].from, FunctionPtr(JITStubs::cti_op_put_by_id_transition_realloc));
     1410        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    5021411    }
    5031412   
     
    5131422
    5141423    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    515     // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    516     repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_self_fail));
     1424    // Should probably go to cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
     1425    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
    5171426
    5181427    int offset = sizeof(JSValue) * cachedOffset;
     
    5521461
    5531462    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    554     // Should probably go to JITStubs::cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    555     repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
     1463    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
     1464    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_put_by_id_generic));
    5561465
    5571466    int offset = sizeof(JSValue) * cachedOffset;
     
    6031512
    6041513    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    605     repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_array_fail));
     1514    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
    6061515}
    6071516
     
    6491558
    6501559    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    651     repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_proto_list));
     1560    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
    6521561}
    6531562
     
    8281737
    8291738    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    830     repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_proto_list));
     1739    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
    8311740}
    8321741
     
    8351744#endif // !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    8361745
     1746#endif // USE(JSVALUE32_64)
     1747
    8371748} // namespace JSC
    8381749
  • trunk/JavaScriptCore/jit/JITStubCall.h

    r45138 r46598  
    3838            : m_jit(jit)
    3939            , m_stub(reinterpret_cast<void*>(stub))
     40            , m_returnType(Cell)
     41            , m_stackIndex(stackIndexStart)
     42        {
     43        }
     44
     45        JITStubCall(JIT* jit, JSPropertyNameIterator* (JIT_STUB *stub)(STUB_ARGS_DECLARATION))
     46            : m_jit(jit)
     47            , m_stub(reinterpret_cast<void*>(stub))
     48            , m_returnType(Cell)
     49            , m_stackIndex(stackIndexStart)
     50        {
     51        }
     52
     53        JITStubCall(JIT* jit, void* (JIT_STUB *stub)(STUB_ARGS_DECLARATION))
     54            : m_jit(jit)
     55            , m_stub(reinterpret_cast<void*>(stub))
     56            , m_returnType(VoidPtr)
     57            , m_stackIndex(stackIndexStart)
     58        {
     59        }
     60
     61        JITStubCall(JIT* jit, int (JIT_STUB *stub)(STUB_ARGS_DECLARATION))
     62            : m_jit(jit)
     63            , m_stub(reinterpret_cast<void*>(stub))
     64            , m_returnType(Int)
     65            , m_stackIndex(stackIndexStart)
     66        {
     67        }
     68
     69        JITStubCall(JIT* jit, bool (JIT_STUB *stub)(STUB_ARGS_DECLARATION))
     70            : m_jit(jit)
     71            , m_stub(reinterpret_cast<void*>(stub))
     72            , m_returnType(Int)
     73            , m_stackIndex(stackIndexStart)
     74        {
     75        }
     76
     77        JITStubCall(JIT* jit, void (JIT_STUB *stub)(STUB_ARGS_DECLARATION))
     78            : m_jit(jit)
     79            , m_stub(reinterpret_cast<void*>(stub))
     80            , m_returnType(Void)
     81            , m_stackIndex(stackIndexStart)
     82        {
     83        }
     84
     85#if USE(JSVALUE32_64)
     86        JITStubCall(JIT* jit, EncodedJSValue (JIT_STUB *stub)(STUB_ARGS_DECLARATION))
     87            : m_jit(jit)
     88            , m_stub(reinterpret_cast<void*>(stub))
    4089            , m_returnType(Value)
    41             , m_argumentIndex(1) // Index 0 is reserved for restoreArgumentReference();
    42         {
    43         }
    44 
    45         JITStubCall(JIT* jit, JSPropertyNameIterator* (JIT_STUB *stub)(STUB_ARGS_DECLARATION))
    46             : m_jit(jit)
    47             , m_stub(reinterpret_cast<void*>(stub))
    48             , m_returnType(Value)
    49             , m_argumentIndex(1) // Index 0 is reserved for restoreArgumentReference();
    50         {
    51         }
    52 
    53         JITStubCall(JIT* jit, void* (JIT_STUB *stub)(STUB_ARGS_DECLARATION))
    54             : m_jit(jit)
    55             , m_stub(reinterpret_cast<void*>(stub))
    56             , m_returnType(Value)
    57             , m_argumentIndex(1) // Index 0 is reserved for restoreArgumentReference();
    58         {
    59         }
    60 
    61         JITStubCall(JIT* jit, int (JIT_STUB *stub)(STUB_ARGS_DECLARATION))
    62             : m_jit(jit)
    63             , m_stub(reinterpret_cast<void*>(stub))
    64             , m_returnType(Value)
    65             , m_argumentIndex(1) // Index 0 is reserved for restoreArgumentReference();
    66         {
    67         }
    68 
    69         JITStubCall(JIT* jit, void (JIT_STUB *stub)(STUB_ARGS_DECLARATION))
    70             : m_jit(jit)
    71             , m_stub(reinterpret_cast<void*>(stub))
    72             , m_returnType(Void)
    73             , m_argumentIndex(1) // Index 0 is reserved for restoreArgumentReference();
    74         {
    75         }
     90            , m_stackIndex(stackIndexStart)
     91        {
     92        }
     93#endif
    7694
    7795        // Arguments are added first to last.
    7896
     97        void skipArgument()
     98        {
     99            m_stackIndex += stackIndexStep;
     100        }
     101
    79102        void addArgument(JIT::Imm32 argument)
    80103        {
    81             m_jit->poke(argument, m_argumentIndex);
    82             ++m_argumentIndex;
     104            m_jit->poke(argument, m_stackIndex);
     105            m_stackIndex += stackIndexStep;
    83106        }
    84107
    85108        void addArgument(JIT::ImmPtr argument)
    86109        {
    87             m_jit->poke(argument, m_argumentIndex);
    88             ++m_argumentIndex;
     110            m_jit->poke(argument, m_stackIndex);
     111            m_stackIndex += stackIndexStep;
    89112        }
    90113
    91114        void addArgument(JIT::RegisterID argument)
    92115        {
    93             m_jit->poke(argument, m_argumentIndex);
    94             ++m_argumentIndex;
    95         }
    96 
     116            m_jit->poke(argument, m_stackIndex);
     117            m_stackIndex += stackIndexStep;
     118        }
     119
     120        void addArgument(const JSValue& value)
     121        {
     122            m_jit->poke(JIT::Imm32(value.payload()), m_stackIndex);
     123            m_jit->poke(JIT::Imm32(value.tag()), m_stackIndex + 1);
     124            m_stackIndex += stackIndexStep;
     125        }
     126
     127        void addArgument(JIT::RegisterID tag, JIT::RegisterID payload)
     128        {
     129            m_jit->poke(payload, m_stackIndex);
     130            m_jit->poke(tag, m_stackIndex + 1);
     131            m_stackIndex += stackIndexStep;
     132        }
     133
     134#if USE(JSVALUE32_64)
     135        void addArgument(unsigned srcVirtualRegister)
     136        {
     137            if (m_jit->m_codeBlock->isConstantRegisterIndex(srcVirtualRegister)) {
     138                addArgument(m_jit->getConstantOperand(srcVirtualRegister));
     139                return;
     140            }
     141
     142            m_jit->emitLoad(srcVirtualRegister, JIT::regT1, JIT::regT0);
     143            addArgument(JIT::regT1, JIT::regT0);
     144        }
     145
     146        void getArgument(size_t argumentNumber, JIT::RegisterID tag, JIT::RegisterID payload)
     147        {
     148            size_t stackIndex = stackIndexStart + (argumentNumber * stackIndexStep);
     149            m_jit->peek(payload, stackIndex);
     150            m_jit->peek(tag, stackIndex + 1);
     151        }
     152#else
    97153        void addArgument(unsigned src, JIT::RegisterID scratchRegister) // src is a virtual register.
    98154        {
     
    105161            m_jit->killLastResultRegister();
    106162        }
     163#endif
    107164
    108165        JIT::Call call()
     
    122179#endif
    123180
     181#if USE(JSVALUE32_64)
     182            m_jit->unmap();
     183#else
    124184            m_jit->killLastResultRegister();
    125             return call;
    126         }
    127 
     185#endif
     186            return call;
     187        }
     188
     189#if USE(JSVALUE32_64)
    128190        JIT::Call call(unsigned dst) // dst is a virtual register.
    129191        {
    130             ASSERT(m_returnType == Value);
     192            ASSERT(m_returnType == Value || m_returnType == Cell);
     193            JIT::Call call = this->call();
     194            if (m_returnType == Value)
     195                m_jit->emitStore(dst, JIT::regT1, JIT::regT0);
     196            else
     197                m_jit->emitStoreCell(dst, JIT::returnValueRegister);
     198            return call;
     199        }
     200#else
     201        JIT::Call call(unsigned dst) // dst is a virtual register.
     202        {
     203            ASSERT(m_returnType == VoidPtr || m_returnType == Cell);
    131204            JIT::Call call = this->call();
    132205            m_jit->emitPutVirtualRegister(dst);
    133206            return call;
    134207        }
    135 
    136         JIT::Call call(JIT::RegisterID dst)
    137         {
    138             ASSERT(m_returnType == Value);
     208#endif
     209
     210        JIT::Call call(JIT::RegisterID dst) // dst is a machine register.
     211        {
     212#if USE(JSVALUE32_64)
     213            ASSERT(m_returnType == Value || m_returnType == VoidPtr || m_returnType == Int || m_returnType == Cell);
     214#else
     215            ASSERT(m_returnType == VoidPtr || m_returnType == Int || m_returnType == Cell);
     216#endif
    139217            JIT::Call call = this->call();
    140218            if (dst != JIT::returnValueRegister)
     
    144222
    145223    private:
     224        static const size_t stackIndexStep = sizeof(EncodedJSValue) == 2 * sizeof(void*) ? 2 : 1;
     225        static const size_t stackIndexStart = 1; // Index 0 is reserved for restoreArgumentReference().
     226
    146227        JIT* m_jit;
    147228        void* m_stub;
    148         enum { Value, Void } m_returnType;
    149         size_t m_argumentIndex;
    150     };
    151 
    152     class CallEvalJITStub : public JITStubCall {
    153     public:
    154         CallEvalJITStub(JIT* jit, Instruction* instruction)
    155             : JITStubCall(jit, JITStubs::cti_op_call_eval)
    156         {
    157             int callee = instruction[2].u.operand;
    158             int argCount = instruction[3].u.operand;
    159             int registerOffset = instruction[4].u.operand;
    160 
    161             addArgument(callee, JIT::regT2);
    162             addArgument(JIT::Imm32(registerOffset));
    163             addArgument(JIT::Imm32(argCount));
    164         }
     229        enum { Void, VoidPtr, Int, Value, Cell } m_returnType;
     230        size_t m_stackIndex;
    165231    };
    166232}
  • trunk/JavaScriptCore/jit/JITStubs.cpp

    r46247 r46598  
    6363namespace JSC {
    6464
    65 
    6665#if PLATFORM(DARWIN) || PLATFORM(WIN_OS)
    6766#define SYMBOL_STRING(name) "_" #name
     
    7069#endif
    7170
     71#if USE(JSVALUE32_64)
     72
    7273#if COMPILER(GCC) && PLATFORM(X86)
    7374
    7475// These ASSERTs remind you that, if you change the layout of JITStackFrame, you
    7576// need to change the assembly trampolines below to match.
    76 COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x38, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
    77 COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x30, JITStackFrame_code_offset_matches_ctiTrampoline);
    78 COMPILE_ASSERT(offsetof(struct JITStackFrame, savedEBX) == 0x1c, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
     77COMPILE_ASSERT(offsetof(struct JITStackFrame, code) % 16 == 0x0, JITStackFrame_maintains_16byte_stack_alignment);
     78COMPILE_ASSERT(offsetof(struct JITStackFrame, savedEBX) == 0x3c, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
     79COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x58, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
     80COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x50, JITStackFrame_code_offset_matches_ctiTrampoline);
    7981
    8082asm volatile (
     
    8688    "pushl %edi" "\n"
    8789    "pushl %ebx" "\n"
    88     "subl $0x1c, %esp" "\n"
     90    "subl $0x3c, %esp" "\n"
    8991    "movl $512, %esi" "\n"
    90     "movl 0x38(%esp), %edi" "\n"
    91     "call *0x30(%esp)" "\n"
    92     "addl $0x1c, %esp" "\n"
     92    "movl 0x58(%esp), %edi" "\n"
     93    "call *0x50(%esp)" "\n"
     94    "addl $0x3c, %esp" "\n"
    9395    "popl %ebx" "\n"
    9496    "popl %edi" "\n"
     
    105107#endif
    106108    "call " SYMBOL_STRING(cti_vm_throw) "\n"
    107     "addl $0x1c, %esp" "\n"
     109    "addl $0x3c, %esp" "\n"
    108110    "popl %ebx" "\n"
    109111    "popl %edi" "\n"
     
    116118".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
    117119SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
    118     "addl $0x1c, %esp" "\n"
     120    "addl $0x3c, %esp" "\n"
    119121    "popl %ebx" "\n"
    120122    "popl %edi" "\n"
     
    132134// These ASSERTs remind you that, if you change the layout of JITStackFrame, you
    133135// need to change the assembly trampolines below to match.
     136COMPILE_ASSERT(offsetof(struct JITStackFrame, code) % 32 == 0x0, JITStackFrame_maintains_32byte_stack_alignment);
     137COMPILE_ASSERT(offsetof(struct JITStackFrame, savedRBX) == 0x48, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
    134138COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x90, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
    135139COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x80, JITStackFrame_code_offset_matches_ctiTrampoline);
    136 COMPILE_ASSERT(offsetof(struct JITStackFrame, savedRBX) == 0x48, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
    137140
    138141asm volatile (
     
    262265// These ASSERTs remind you that, if you change the layout of JITStackFrame, you
    263266// need to change the assembly trampolines below to match.
     267COMPILE_ASSERT(offsetof(struct JITStackFrame, code) % 16 == 0x0, JITStackFrame_maintains_16byte_stack_alignment);
     268COMPILE_ASSERT(offsetof(struct JITStackFrame, savedEBX) == 0x3c, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
     269COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x58, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
     270COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x50, JITStackFrame_code_offset_matches_ctiTrampoline);
     271
     272extern "C" {
     273
     274    __declspec(naked) EncodedJSValue ctiTrampoline(void* code, RegisterFile*, CallFrame*, JSValue* exception, Profiler**, JSGlobalData*)
     275    {
     276        __asm {
     277            push ebp;
     278            mov ebp, esp;
     279            push esi;
     280            push edi;
     281            push ebx;
     282            sub esp, 0x3c;
     283            mov esi, 512;
     284            mov ecx, esp;
     285            mov edi, [esp + 0x58];
     286            call [esp + 0x50];
     287            add esp, 0x3c;
     288            pop ebx;
     289            pop edi;
     290            pop esi;
     291            pop ebp;
     292            ret;
     293        }
     294    }
     295
     296    __declspec(naked) void ctiVMThrowTrampoline()
     297    {
     298        __asm {
     299            mov ecx, esp;
     300            call cti_vm_throw;
     301            add esp, 0x3c;
     302            pop ebx;
     303            pop edi;
     304            pop esi;
     305            pop ebp;
     306            ret;
     307        }
     308    }
     309
     310    __declspec(naked) void ctiOpThrowNotCaught()
     311    {
     312        __asm {
     313            add esp, 0x3c;
     314            pop ebx;
     315            pop edi;
     316            pop esi;
     317            pop ebp;
     318            ret;
     319        }
     320    }
     321}
     322
     323#endif // COMPILER(GCC) && PLATFORM(X86)
     324
     325#else // USE(JSVALUE32_64)
     326
     327#if COMPILER(GCC) && PLATFORM(X86)
     328
     329// These ASSERTs remind you that, if you change the layout of JITStackFrame, you
     330// need to change the assembly trampolines below to match.
     331COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x38, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
     332COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x30, JITStackFrame_code_offset_matches_ctiTrampoline);
     333COMPILE_ASSERT(offsetof(struct JITStackFrame, savedEBX) == 0x1c, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
     334
     335asm volatile (
     336".globl " SYMBOL_STRING(ctiTrampoline) "\n"
     337SYMBOL_STRING(ctiTrampoline) ":" "\n"
     338    "pushl %ebp" "\n"
     339    "movl %esp, %ebp" "\n"
     340    "pushl %esi" "\n"
     341    "pushl %edi" "\n"
     342    "pushl %ebx" "\n"
     343    "subl $0x1c, %esp" "\n"
     344    "movl $512, %esi" "\n"
     345    "movl 0x38(%esp), %edi" "\n"
     346    "call *0x30(%esp)" "\n"
     347    "addl $0x1c, %esp" "\n"
     348    "popl %ebx" "\n"
     349    "popl %edi" "\n"
     350    "popl %esi" "\n"
     351    "popl %ebp" "\n"
     352    "ret" "\n"
     353);
     354
     355asm volatile (
     356".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
     357SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
     358#if !USE(JIT_STUB_ARGUMENT_VA_LIST)
     359    "movl %esp, %ecx" "\n"
     360#endif
     361    "call " SYMBOL_STRING(cti_vm_throw) "\n"
     362    "addl $0x1c, %esp" "\n"
     363    "popl %ebx" "\n"
     364    "popl %edi" "\n"
     365    "popl %esi" "\n"
     366    "popl %ebp" "\n"
     367    "ret" "\n"
     368);
     369   
     370asm volatile (
     371".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
     372SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
     373    "addl $0x1c, %esp" "\n"
     374    "popl %ebx" "\n"
     375    "popl %edi" "\n"
     376    "popl %esi" "\n"
     377    "popl %ebp" "\n"
     378    "ret" "\n"
     379);
     380   
     381#elif COMPILER(GCC) && PLATFORM(X86_64)
     382
     383#if USE(JIT_STUB_ARGUMENT_VA_LIST)
     384#error "JIT_STUB_ARGUMENT_VA_LIST not supported on x86-64."
     385#endif
     386
     387// These ASSERTs remind you that, if you change the layout of JITStackFrame, you
     388// need to change the assembly trampolines below to match.
     389COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x90, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
     390COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x80, JITStackFrame_code_offset_matches_ctiTrampoline);
     391COMPILE_ASSERT(offsetof(struct JITStackFrame, savedRBX) == 0x48, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
     392
     393asm volatile (
     394".globl " SYMBOL_STRING(ctiTrampoline) "\n"
     395SYMBOL_STRING(ctiTrampoline) ":" "\n"
     396    "pushq %rbp" "\n"
     397    "movq %rsp, %rbp" "\n"
     398    "pushq %r12" "\n"
     399    "pushq %r13" "\n"
     400    "pushq %r14" "\n"
     401    "pushq %r15" "\n"
     402    "pushq %rbx" "\n"
     403    "subq $0x48, %rsp" "\n"
     404    "movq $512, %r12" "\n"
     405    "movq $0xFFFF000000000000, %r14" "\n"
     406    "movq $0xFFFF000000000002, %r15" "\n"
     407    "movq 0x90(%rsp), %r13" "\n"
     408    "call *0x80(%rsp)" "\n"
     409    "addq $0x48, %rsp" "\n"
     410    "popq %rbx" "\n"
     411    "popq %r15" "\n"
     412    "popq %r14" "\n"
     413    "popq %r13" "\n"
     414    "popq %r12" "\n"
     415    "popq %rbp" "\n"
     416    "ret" "\n"
     417);
     418
     419asm volatile (
     420".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
     421SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
     422    "movq %rsp, %rdi" "\n"
     423    "call " SYMBOL_STRING(cti_vm_throw) "\n"
     424    "addq $0x48, %rsp" "\n"
     425    "popq %rbx" "\n"
     426    "popq %r15" "\n"
     427    "popq %r14" "\n"
     428    "popq %r13" "\n"
     429    "popq %r12" "\n"
     430    "popq %rbp" "\n"
     431    "ret" "\n"
     432);
     433
     434asm volatile (
     435".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
     436SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
     437    "addq $0x48, %rsp" "\n"
     438    "popq %rbx" "\n"
     439    "popq %r15" "\n"
     440    "popq %r14" "\n"
     441    "popq %r13" "\n"
     442    "popq %r12" "\n"
     443    "popq %rbp" "\n"
     444    "ret" "\n"
     445);
     446
     447#elif COMPILER(GCC) && PLATFORM_ARM_ARCH(7)
     448
     449#if USE(JIT_STUB_ARGUMENT_VA_LIST)
     450#error "JIT_STUB_ARGUMENT_VA_LIST not supported on ARMv7."
     451#endif
     452
     453asm volatile (
     454".text" "\n"
     455".align 2" "\n"
     456".globl " SYMBOL_STRING(ctiTrampoline) "\n"
     457".thumb" "\n"
     458".thumb_func " SYMBOL_STRING(ctiTrampoline) "\n"
     459SYMBOL_STRING(ctiTrampoline) ":" "\n"
     460    "sub sp, sp, #0x3c" "\n"
     461    "str lr, [sp, #0x20]" "\n"
     462    "str r4, [sp, #0x24]" "\n"
     463    "str r5, [sp, #0x28]" "\n"
     464    "str r6, [sp, #0x2c]" "\n"
     465    "str r1, [sp, #0x30]" "\n"
     466    "str r2, [sp, #0x34]" "\n"
     467    "str r3, [sp, #0x38]" "\n"
     468    "cpy r5, r2" "\n"
     469    "mov r6, #512" "\n"
     470    "blx r0" "\n"
     471    "ldr r6, [sp, #0x2c]" "\n"
     472    "ldr r5, [sp, #0x28]" "\n"
     473    "ldr r4, [sp, #0x24]" "\n"
     474    "ldr lr, [sp, #0x20]" "\n"
     475    "add sp, sp, #0x3c" "\n"
     476    "bx lr" "\n"
     477);
     478
     479asm volatile (
     480".text" "\n"
     481".align 2" "\n"
     482".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
     483".thumb" "\n"
     484".thumb_func " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
     485SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
     486    "cpy r0, sp" "\n"
     487    "bl " SYMBOL_STRING(cti_vm_throw) "\n"
     488    "ldr r6, [sp, #0x2c]" "\n"
     489    "ldr r5, [sp, #0x28]" "\n"
     490    "ldr r4, [sp, #0x24]" "\n"
     491    "ldr lr, [sp, #0x20]" "\n"
     492    "add sp, sp, #0x3c" "\n"
     493    "bx lr" "\n"
     494);
     495
     496asm volatile (
     497".text" "\n"
     498".align 2" "\n"
     499".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
     500".thumb" "\n"
     501".thumb_func " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
     502SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
     503    "ldr r6, [sp, #0x2c]" "\n"
     504    "ldr r5, [sp, #0x28]" "\n"
     505    "ldr r4, [sp, #0x24]" "\n"
     506    "ldr lr, [sp, #0x20]" "\n"
     507    "add sp, sp, #0x3c" "\n"
     508    "bx lr" "\n"
     509);
     510
     511#elif COMPILER(MSVC)
     512
     513#if USE(JIT_STUB_ARGUMENT_VA_LIST)
     514#error "JIT_STUB_ARGUMENT_VA_LIST configuration not supported on MSVC."
     515#endif
     516
     517// These ASSERTs remind you that, if you change the layout of JITStackFrame, you
     518// need to change the assembly trampolines below to match.
    264519COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x38, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
    265520COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x30, JITStackFrame_code_offset_matches_ctiTrampoline);
     
    294549        __asm {
    295550            mov ecx, esp;
    296             call JITStubs::cti_vm_throw;
     551            call cti_vm_throw;
    297552            add esp, 0x1c;
    298553            pop ebx;
     
    317572}
    318573
    319 #endif
     574#endif // COMPILER(GCC) && PLATFORM(X86)
     575
     576#endif // USE(JSVALUE32_64)
    320577
    321578#if ENABLE(OPCODE_SAMPLING)
     
    327584JITThunks::JITThunks(JSGlobalData* globalData)
    328585{
    329     JIT::compileCTIMachineTrampolines(globalData, &m_executablePool, &m_ctiArrayLengthTrampoline, &m_ctiStringLengthTrampoline, &m_ctiVirtualCallPreLink, &m_ctiVirtualCallLink, &m_ctiVirtualCall, &m_ctiNativeCallThunk);
     586    JIT::compileCTIMachineTrampolines(globalData, &m_executablePool, &m_ctiStringLengthTrampoline, &m_ctiVirtualCallPreLink, &m_ctiVirtualCallLink, &m_ctiVirtualCall, &m_ctiNativeCallThunk);
    330587
    331588#if PLATFORM_ARM_ARCH(7)
     
    359616    // Uncacheable: give up.
    360617    if (!slot.isCacheable()) {
    361         ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
     618        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
    362619        return;
    363620    }
     
    367624
    368625    if (structure->isDictionary()) {
    369         ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
     626        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
    370627        return;
    371628    }
     
    373630    // If baseCell != base, then baseCell must be a proxy for another object.
    374631    if (baseCell != slot.base()) {
    375         ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
     632        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
    376633        return;
    377634    }
     
    385642        StructureChain* prototypeChain = structure->prototypeChain(callFrame);
    386643        if (!prototypeChain->isCacheable()) {
    387             ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
     644            ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
    388645            return;
    389646        }
     
    405662    // FIXME: Cache property access for immediates.
    406663    if (!baseValue.isCell()) {
    407         ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
     664        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
    408665        return;
    409666    }
     
    425682    // Uncacheable: give up.
    426683    if (!slot.isCacheable()) {
    427         ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
     684        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
    428685        return;
    429686    }
     
    433690
    434691    if (structure->isDictionary()) {
    435         ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
     692        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
    436693        return;
    437694    }
     
    476733    StructureChain* prototypeChain = structure->prototypeChain(callFrame);
    477734    if (!prototypeChain->isCacheable()) {
    478         ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
     735        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
    479736        return;
    480737    }
     
    483740}
    484741
    485 #endif
     742#endif // ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    486743
    487744#if USE(JIT_STUB_ARGUMENT_VA_LIST)
     
    560817#define CHECK_FOR_EXCEPTION() \
    561818    do { \
    562         if (UNLIKELY(stackFrame.globalData->exception != JSValue())) \
     819        if (UNLIKELY(stackFrame.globalData->exception)) \
    563820            VM_THROW_EXCEPTION(); \
    564821    } while (0)
    565822#define CHECK_FOR_EXCEPTION_AT_END() \
    566823    do { \
    567         if (UNLIKELY(stackFrame.globalData->exception != JSValue())) \
     824        if (UNLIKELY(stackFrame.globalData->exception)) \
    568825            VM_THROW_EXCEPTION_AT_END(); \
    569826    } while (0)
    570827#define CHECK_FOR_EXCEPTION_VOID() \
    571828    do { \
    572         if (UNLIKELY(stackFrame.globalData->exception != JSValue())) { \
     829        if (UNLIKELY(stackFrame.globalData->exception)) { \
    573830            VM_THROW_EXCEPTION_AT_END(); \
    574831            return; \
    575832        } \
    576833    } while (0)
    577 
    578 namespace JITStubs {
    579834
    580835#if PLATFORM_ARM_ARCH(7)
     
    602857#endif
    603858
    604 DEFINE_STUB_FUNCTION(JSObject*, op_convert_this)
     859DEFINE_STUB_FUNCTION(EncodedJSValue, op_convert_this)
    605860{
    606861    STUB_INIT_STACK_FRAME(stackFrame);
     
    611866    JSObject* result = v1.toThisObject(callFrame);
    612867    CHECK_FOR_EXCEPTION_AT_END();
    613     return result;
     868    return JSValue::encode(result);
    614869}
    615870
     
    651906
    652907    if (rightIsNumber & leftIsString) {
    653         RefPtr<UString::Rep> value = v2.isInt32Fast() ?
    654             concatenate(asString(v1)->value().rep(), v2.getInt32Fast()) :
     908        RefPtr<UString::Rep> value = v2.isInt32() ?
     909            concatenate(asString(v1)->value().rep(), v2.asInt32()) :
    655910            concatenate(asString(v1)->value().rep(), right);
    656911
     
    8061061}
    8071062
    808 
    809 DEFINE_STUB_FUNCTION(EncodedJSValue, op_put_by_id_transition_realloc)
     1063DEFINE_STUB_FUNCTION(JSObject*, op_put_by_id_transition_realloc)
    8101064{
    8111065    STUB_INIT_STACK_FRAME(stackFrame);
    8121066
    8131067    JSValue baseValue = stackFrame.args[0].jsValue();
    814     int32_t oldSize = stackFrame.args[1].int32();
    815     int32_t newSize = stackFrame.args[2].int32();
     1068    int32_t oldSize = stackFrame.args[3].int32();
     1069    int32_t newSize = stackFrame.args[4].int32();
    8161070
    8171071    ASSERT(baseValue.isObject());
    818     asObject(baseValue)->allocatePropertyStorage(oldSize, newSize);
    819 
    820     return JSValue::encode(baseValue);
     1072    JSObject* base = asObject(baseValue);
     1073    base->allocatePropertyStorage(oldSize, newSize);
     1074
     1075    return base;
    8211076}
    8221077
     
    11171372}
    11181373
    1119 #endif
     1374#endif // ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    11201375
    11211376DEFINE_STUB_FUNCTION(EncodedJSValue, op_instanceof)
     
    12641519}
    12651520
     1521#if ENABLE(JIT_OPTIMIZE_CALL)
    12661522DEFINE_STUB_FUNCTION(void*, vm_dontLazyLinkCall)
    12671523{
     
    12941550    return jitCode.addressForCall().executableAddress();
    12951551}
     1552#endif // ENABLE(JIT_OPTIMIZE_CALL)
    12961553
    12971554DEFINE_STUB_FUNCTION(JSObject*, op_push_activation)
     
    13591616    Arguments* arguments = new (stackFrame.globalData) Arguments(stackFrame.callFrame);
    13601617    stackFrame.callFrame->setCalleeArguments(arguments);
    1361     stackFrame.callFrame[RegisterFile::ArgumentsRegister] = arguments;
     1618    stackFrame.callFrame[RegisterFile::ArgumentsRegister] = JSValue(arguments);
    13621619}
    13631620
     
    13681625    Arguments* arguments = new (stackFrame.globalData) Arguments(stackFrame.callFrame, Arguments::NoParameters);
    13691626    stackFrame.callFrame->setCalleeArguments(arguments);
    1370     stackFrame.callFrame[RegisterFile::ArgumentsRegister] = arguments;
     1627    stackFrame.callFrame[RegisterFile::ArgumentsRegister] = JSValue(arguments);
    13711628}
    13721629
     
    15201777    JSValue result;
    15211778
    1522     if (LIKELY(subscript.isUInt32Fast())) {
    1523         uint32_t i = subscript.getUInt32Fast();
     1779    if (LIKELY(subscript.isUInt32())) {
     1780        uint32_t i = subscript.asUInt32();
    15241781        if (isJSArray(globalData, baseValue)) {
    15251782            JSArray* jsArray = asArray(baseValue);
     
    15591816    JSValue result;
    15601817   
    1561     if (LIKELY(subscript.isUInt32Fast())) {
    1562         uint32_t i = subscript.getUInt32Fast();
     1818    if (LIKELY(subscript.isUInt32())) {
     1819        uint32_t i = subscript.asUInt32();
    15631820        if (isJSString(globalData, baseValue) && asString(baseValue)->canGetIndex(i))
    15641821            result = asString(baseValue)->getIndex(stackFrame.globalData, i);
     
    15771834}
    15781835   
    1579 
    15801836DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_byte_array)
    15811837{
     
    15901846    JSValue result;
    15911847
    1592     if (LIKELY(subscript.isUInt32Fast())) {
    1593         uint32_t i = subscript.getUInt32Fast();
     1848    if (LIKELY(subscript.isUInt32())) {
     1849        uint32_t i = subscript.asUInt32();
    15941850        if (isJSByteArray(globalData, baseValue) && asByteArray(baseValue)->canAccessIndex(i)) {
    15951851            // All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
     
    16091865}
    16101866
    1611 DEFINE_STUB_FUNCTION(EncodedJSValue, op_resolve_func)
    1612 {
    1613     STUB_INIT_STACK_FRAME(stackFrame);
    1614 
    1615     CallFrame* callFrame = stackFrame.callFrame;
    1616     ScopeChainNode* scopeChain = callFrame->scopeChain();
    1617 
    1618     ScopeChainIterator iter = scopeChain->begin();
    1619     ScopeChainIterator end = scopeChain->end();
    1620 
    1621     // FIXME: add scopeDepthIsZero optimization
    1622 
    1623     ASSERT(iter != end);
    1624 
    1625     Identifier& ident = stackFrame.args[0].identifier();
    1626     JSObject* base;
    1627     do {
    1628         base = *iter;
    1629         PropertySlot slot(base);
    1630         if (base->getPropertySlot(callFrame, ident, slot)) {           
    1631             // ECMA 11.2.3 says that if we hit an activation the this value should be null.
    1632             // However, section 10.2.3 says that in the case where the value provided
    1633             // by the caller is null, the global object should be used. It also says
    1634             // that the section does not apply to internal functions, but for simplicity
    1635             // of implementation we use the global object anyway here. This guarantees
    1636             // that in host objects you always get a valid object for this.
    1637             // We also handle wrapper substitution for the global object at the same time.
    1638             JSObject* thisObj = base->toThisObject(callFrame);
    1639             JSValue result = slot.getValue(callFrame, ident);
    1640             CHECK_FOR_EXCEPTION_AT_END();
    1641 
    1642             callFrame->registers()[stackFrame.args[1].int32()] = JSValue(thisObj);
    1643             return JSValue::encode(result);
    1644         }
    1645         ++iter;
    1646     } while (iter != end);
    1647 
    1648     CodeBlock* codeBlock = callFrame->codeBlock();
    1649     unsigned vPCIndex = codeBlock->getBytecodeIndex(callFrame, STUB_RETURN_ADDRESS);
    1650     stackFrame.globalData->exception = createUndefinedVariableError(callFrame, ident, vPCIndex, codeBlock);
    1651     VM_THROW_EXCEPTION_AT_END();
    1652     return JSValue::encode(JSValue());
    1653 }
    1654 
    16551867DEFINE_STUB_FUNCTION(EncodedJSValue, op_sub)
    16561868{
     
    16821894    JSValue value = stackFrame.args[2].jsValue();
    16831895
    1684     if (LIKELY(subscript.isUInt32Fast())) {
    1685         uint32_t i = subscript.getUInt32Fast();
     1896    if (LIKELY(subscript.isUInt32())) {
     1897        uint32_t i = subscript.asUInt32();
    16861898        if (isJSArray(globalData, baseValue)) {
    16871899            JSArray* jsArray = asArray(baseValue);
     
    16941906            ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_val_byte_array));
    16951907            // All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
    1696             if (value.isInt32Fast()) {
    1697                 jsByteArray->setIndex(i, value.getInt32Fast());
     1908            if (value.isInt32()) {
     1909                jsByteArray->setIndex(i, value.asInt32());
    16981910                return;
    16991911            } else {
     
    17331945        asArray(baseValue)->JSArray::put(callFrame, i, value);
    17341946    else {
    1735         // This should work since we're re-boxing an immediate unboxed in JIT code.
    1736         ASSERT(JSValue::makeInt32Fast(i));
    1737         Identifier property(callFrame, JSValue::makeInt32Fast(i).toString(callFrame));
    1738         // FIXME: can toString throw an exception here?
    1739         if (!stackFrame.globalData->exception) { // Don't put to an object if toString threw an exception.
    1740             PutPropertySlot slot;
    1741             baseValue.put(callFrame, property, value, slot);
    1742         }
     1947        Identifier property(callFrame, UString::from(i));
     1948        PutPropertySlot slot;
     1949        baseValue.put(callFrame, property, value, slot);
    17431950    }
    17441951
     
    17571964    JSValue value = stackFrame.args[2].jsValue();
    17581965   
    1759     if (LIKELY(subscript.isUInt32Fast())) {
    1760         uint32_t i = subscript.getUInt32Fast();
     1966    if (LIKELY(subscript.isUInt32())) {
     1967        uint32_t i = subscript.asUInt32();
    17611968        if (isJSByteArray(globalData, baseValue) && asByteArray(baseValue)->canAccessIndex(i)) {
    17621969            JSByteArray* jsByteArray = asByteArray(baseValue);
    17631970           
    17641971            // All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
    1765             if (value.isInt32Fast()) {
    1766                 jsByteArray->setIndex(i, value.getInt32Fast());
     1972            if (value.isInt32()) {
     1973                jsByteArray->setIndex(i, value.asInt32());
    17671974                return;
    17681975            } else {
     
    18152022{
    18162023    STUB_INIT_STACK_FRAME(stackFrame);
     2024
    18172025    CallFrame* callFrame = stackFrame.callFrame;
    18182026    RegisterFile* registerFile = stackFrame.registerFile;
     
    18292037            VM_THROW_EXCEPTION();
    18302038        }
    1831         int32_t expectedParams = asFunction(callFrame->registers()[RegisterFile::Callee].jsValue())->body()->parameterCount();
     2039        int32_t expectedParams = callFrame->callee()->body()->parameterCount();
    18322040        int32_t inplaceArgs = min(providedParams, expectedParams);
    18332041       
     
    19612169
    19622170    CallFrame* callFrame = stackFrame.callFrame;
    1963     JSGlobalObject* globalObject = asGlobalObject(stackFrame.args[0].jsValue());
     2171    JSGlobalObject* globalObject = stackFrame.args[0].globalObject();
    19642172    Identifier& ident = stackFrame.args[1].identifier();
    19652173    unsigned globalResolveInfoIndex = stackFrame.args[2].int32();
     
    20852293}
    20862294
    2087 DEFINE_STUB_FUNCTION(EncodedJSValue, op_eq)
     2295#if USE(JSVALUE32_64)
     2296
     2297DEFINE_STUB_FUNCTION(bool, op_eq)
    20882298{
    20892299    STUB_INIT_STACK_FRAME(stackFrame);
     
    20922302    JSValue src2 = stackFrame.args[1].jsValue();
    20932303
    2094     CallFrame* callFrame = stackFrame.callFrame;
    2095 
    2096     ASSERT(!JSValue::areBothInt32Fast(src1, src2));
    2097     JSValue result = jsBoolean(JSValue::equalSlowCaseInline(callFrame, src1, src2));
    2098     CHECK_FOR_EXCEPTION_AT_END();
    2099     return JSValue::encode(result);
    2100 }
     2304    start:
     2305    if (src2.isUndefined()) {
     2306        return src1.isNull() ||
     2307               (src1.isCell() && asCell(src1)->structure()->typeInfo().masqueradesAsUndefined()) ||
     2308               src1.isUndefined();
     2309    }
     2310   
     2311    if (src2.isNull()) {
     2312        return src1.isUndefined() ||
     2313               (src1.isCell() && asCell(src1)->structure()->typeInfo().masqueradesAsUndefined()) ||
     2314               src1.isNull();
     2315    }
     2316
     2317    if (src1.isInt32()) {
     2318        if (src2.isDouble())
     2319            return src1.asInt32() == src2.asDouble();
     2320        double d = src2.toNumber(stackFrame.callFrame);
     2321        CHECK_FOR_EXCEPTION();
     2322        return src1.asInt32() == d;
     2323    }
     2324
     2325    if (src1.isDouble()) {
     2326        if (src2.isInt32())
     2327            return src1.asDouble() == src2.asInt32();
     2328        double d = src2.toNumber(stackFrame.callFrame);
     2329        CHECK_FOR_EXCEPTION();
     2330        return src1.asDouble() == d;
     2331    }
     2332
     2333    if (src1.isTrue()) {
     2334        if (src2.isFalse())
     2335            return false;
     2336        double d = src2.toNumber(stackFrame.callFrame);
     2337        CHECK_FOR_EXCEPTION();
     2338        return d == 1.0;
     2339    }
     2340
     2341    if (src1.isFalse()) {
     2342        if (src2.isTrue())
     2343            return false;
     2344        double d = src2.toNumber(stackFrame.callFrame);
     2345        CHECK_FOR_EXCEPTION();
     2346        return d == 0.0;
     2347    }
     2348   
     2349    if (src1.isUndefined())
     2350        return src2.isCell() && asCell(src2)->structure()->typeInfo().masqueradesAsUndefined();
     2351   
     2352    if (src1.isNull())
     2353        return src2.isCell() && asCell(src2)->structure()->typeInfo().masqueradesAsUndefined();
     2354
     2355    ASSERT(src1.isCell());
     2356
     2357    JSCell* cell1 = asCell(src1);
     2358
     2359    if (cell1->isString()) {
     2360        if (src2.isInt32())
     2361            return static_cast<JSString*>(cell1)->value().toDouble() == src2.asInt32();
     2362           
     2363        if (src2.isDouble())
     2364            return static_cast<JSString*>(cell1)->value().toDouble() == src2.asDouble();
     2365
     2366        if (src2.isTrue())
     2367            return static_cast<JSString*>(cell1)->value().toDouble() == 1.0;
     2368
     2369        if (src2.isFalse())
     2370            return static_cast<JSString*>(cell1)->value().toDouble() == 0.0;
     2371
     2372        ASSERT(src2.isCell());
     2373        JSCell* cell2 = asCell(src2);
     2374        if (cell2->isString())
     2375            return static_cast<JSString*>(cell1)->value() == static_cast<JSString*>(cell2)->value();
     2376
     2377        ASSERT(cell2->isObject());
     2378        src2 = static_cast<JSObject*>(cell2)->toPrimitive(stackFrame.callFrame);
     2379        CHECK_FOR_EXCEPTION();
     2380        goto start;
     2381    }
     2382
     2383    ASSERT(cell1->isObject());
     2384    if (src2.isObject())
     2385        return static_cast<JSObject*>(cell1) == asObject(src2);
     2386    src1 = static_cast<JSObject*>(cell1)->toPrimitive(stackFrame.callFrame);
     2387    CHECK_FOR_EXCEPTION();
     2388    goto start;
     2389}
     2390
     2391DEFINE_STUB_FUNCTION(bool, op_eq_strings)
     2392{
     2393    STUB_INIT_STACK_FRAME(stackFrame);
     2394
     2395    JSString* string1 = stackFrame.args[0].jsString();
     2396    JSString* string2 = stackFrame.args[1].jsString();
     2397
     2398    ASSERT(string1->isString());
     2399    ASSERT(string2->isString());
     2400    return string1->value() == string2->value();
     2401}
     2402
     2403#else // USE(JSVALUE32_64)
     2404
     2405DEFINE_STUB_FUNCTION(bool, op_eq)
     2406{
     2407    STUB_INIT_STACK_FRAME(stackFrame);
     2408
     2409    JSValue src1 = stackFrame.args[0].jsValue();
     2410    JSValue src2 = stackFrame.args[1].jsValue();
     2411
     2412    CallFrame* callFrame = stackFrame.callFrame;
     2413
     2414    bool result = JSValue::equalSlowCaseInline(callFrame, src1, src2);
     2415    CHECK_FOR_EXCEPTION_AT_END();
     2416    return result;
     2417}
     2418
     2419#endif // USE(JSVALUE32_64)
    21012420
    21022421DEFINE_STUB_FUNCTION(EncodedJSValue, op_lshift)
     
    21072426    JSValue shift = stackFrame.args[1].jsValue();
    21082427
    2109     int32_t left;
    2110     uint32_t right;
    2111     if (JSValue::areBothInt32Fast(val, shift))
    2112         return JSValue::encode(jsNumber(stackFrame.globalData, val.getInt32Fast() << (shift.getInt32Fast() & 0x1f)));
    2113     if (val.numberToInt32(left) && shift.numberToUInt32(right))
    2114         return JSValue::encode(jsNumber(stackFrame.globalData, left << (right & 0x1f)));
    2115 
    21162428    CallFrame* callFrame = stackFrame.callFrame;
    21172429    JSValue result = jsNumber(stackFrame.globalData, (val.toInt32(callFrame)) << (shift.toUInt32(callFrame) & 0x1f));
     
    21272439    JSValue src2 = stackFrame.args[1].jsValue();
    21282440
    2129     int32_t left;
    2130     int32_t right;
    2131     if (src1.numberToInt32(left) && src2.numberToInt32(right))
    2132         return JSValue::encode(jsNumber(stackFrame.globalData, left & right));
    2133 
     2441    ASSERT(!src1.isInt32() || !src2.isInt32());
    21342442    CallFrame* callFrame = stackFrame.callFrame;
    21352443    JSValue result = jsNumber(stackFrame.globalData, src1.toInt32(callFrame) & src2.toInt32(callFrame));
     
    21452453    JSValue shift = stackFrame.args[1].jsValue();
    21462454
    2147     int32_t left;
    2148     uint32_t right;
    2149     if (JSFastMath::canDoFastRshift(val, shift))
    2150         return JSValue::encode(JSFastMath::rightShiftImmediateNumbers(val, shift));
    2151     if (val.numberToInt32(left) && shift.numberToUInt32(right))
    2152         return JSValue::encode(jsNumber(stackFrame.globalData, left >> (right & 0x1f)));
    2153 
    21542455    CallFrame* callFrame = stackFrame.callFrame;
    21552456    JSValue result = jsNumber(stackFrame.globalData, (val.toInt32(callFrame)) >> (shift.toUInt32(callFrame) & 0x1f));
     2457
    21562458    CHECK_FOR_EXCEPTION_AT_END();
    21572459    return JSValue::encode(result);
     
    21642466    JSValue src = stackFrame.args[0].jsValue();
    21652467
    2166     int value;
    2167     if (src.numberToInt32(value))
    2168         return JSValue::encode(jsNumber(stackFrame.globalData, ~value));
    2169 
     2468    ASSERT(!src.isInt32());
    21702469    CallFrame* callFrame = stackFrame.callFrame;
    21712470    JSValue result = jsNumber(stackFrame.globalData, ~src.toInt32(callFrame));
     
    22412540}
    22422541
    2243 DEFINE_STUB_FUNCTION(EncodedJSValue, op_neq)
    2244 {
    2245     STUB_INIT_STACK_FRAME(stackFrame);
    2246 
    2247     JSValue src1 = stackFrame.args[0].jsValue();
    2248     JSValue src2 = stackFrame.args[1].jsValue();
    2249 
    2250     ASSERT(!JSValue::areBothInt32Fast(src1, src2));
    2251 
    2252     CallFrame* callFrame = stackFrame.callFrame;
    2253     JSValue result = jsBoolean(!JSValue::equalSlowCaseInline(callFrame, src1, src2));
    2254     CHECK_FOR_EXCEPTION_AT_END();
    2255     return JSValue::encode(result);
    2256 }
    2257 
    22582542DEFINE_STUB_FUNCTION(EncodedJSValue, op_post_dec)
    22592543{
     
    22792563
    22802564    CallFrame* callFrame = stackFrame.callFrame;
    2281 
    2282     if (JSFastMath::canDoFastUrshift(val, shift))
    2283         return JSValue::encode(JSFastMath::rightShiftImmediateNumbers(val, shift));
    2284     else {
    2285         JSValue result = jsNumber(stackFrame.globalData, (val.toUInt32(callFrame)) >> (shift.toUInt32(callFrame) & 0x1f));
    2286         CHECK_FOR_EXCEPTION_AT_END();
    2287         return JSValue::encode(result);
    2288     }
     2565    JSValue result = jsNumber(stackFrame.globalData, (val.toUInt32(callFrame)) >> (shift.toUInt32(callFrame) & 0x1f));
     2566    CHECK_FOR_EXCEPTION_AT_END();
     2567    return JSValue::encode(result);
    22892568}
    22902569
     
    23452624        JSValue exceptionValue;
    23462625        JSValue result = interpreter->callEval(callFrame, registerFile, argv, argCount, registerOffset, exceptionValue);
    2347         if (UNLIKELY(exceptionValue != JSValue())) {
     2626        if (UNLIKELY(exceptionValue)) {
    23482627            stackFrame.globalData->exception = exceptionValue;
    23492628            VM_THROW_EXCEPTION_AT_END();
     
    25832862    CodeBlock* codeBlock = callFrame->codeBlock();
    25842863
    2585     if (scrutinee.isInt32Fast())
    2586         return codeBlock->immediateSwitchJumpTable(tableIndex).ctiForValue(scrutinee.getInt32Fast()).executableAddress();
     2864    if (scrutinee.isInt32())
     2865        return codeBlock->immediateSwitchJumpTable(tableIndex).ctiForValue(scrutinee.asInt32()).executableAddress();
    25872866    else {
    25882867        double value;
     
    27383017}
    27393018
    2740 } // namespace JITStubs
    2741 
    27423019} // namespace JSC
    27433020
  • trunk/JavaScriptCore/jit/JITStubs.h

    r44886 r46598  
    5454    class FuncDeclNode;
    5555    class FuncExprNode;
     56    class JSGlobalObject;
    5657    class RegExp;
    5758
     
    6970        RegExp* regExp() { return static_cast<RegExp*>(asPointer); }
    7071        JSPropertyNameIterator* propertyNameIterator() { return static_cast<JSPropertyNameIterator*>(asPointer); }
     72        JSGlobalObject* globalObject() { return static_cast<JSGlobalObject*>(asPointer); }
     73        JSString* jsString() { return static_cast<JSString*>(asPointer); }
    7174        ReturnAddressPtr returnAddress() { return ReturnAddressPtr(asPointer); }
    7275    };
     
    7477#if PLATFORM(X86_64)
    7578    struct JITStackFrame {
    76         JITStubArg padding; // Unused
    77         JITStubArg args[8];
     79        void* reserved; // Unused
     80        JITStubArg args[6];
     81        void* padding[2]; // Maintain 32-byte stack alignment (possibly overkill).
    7882
    7983        void* savedRBX;
     
    96100    };
    97101#elif PLATFORM(X86)
     102#if COMPILER(MSVC)
     103#pragma pack(push)
     104#pragma pack(4)
     105#endif // COMPILER(MSVC)
    98106    struct JITStackFrame {
    99         JITStubArg padding; // Unused
     107        void* reserved; // Unused
    100108        JITStubArg args[6];
     109#if USE(JSVALUE32_64)
     110        void* padding[2]; // Maintain 16-byte stack alignment.
     111#endif
    101112
    102113        void* savedEBX;
     
    116127        ReturnAddressPtr* returnAddressSlot() { return reinterpret_cast<ReturnAddressPtr*>(this) - 1; }
    117128    };
     129#if COMPILER(MSVC)
     130#pragma pack(pop)
     131#endif // COMPILER(MSVC)
    118132#elif PLATFORM_ARM_ARCH(7)
    119133    struct JITStackFrame {
    120         JITStubArg padding; // Unused
     134        void* reserved; // Unused
    121135        JITStubArg args[6];
     136#if USE(JSVALUE32_64)
     137        void* padding[2]; // Maintain 16-byte stack alignment.
     138#endif
    122139
    123140        ReturnAddressPtr thunkReturnAddress;
     
    199216        static void tryCachePutByID(CallFrame*, CodeBlock*, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot&);
    200217       
    201         MacroAssemblerCodePtr ctiArrayLengthTrampoline() { return m_ctiArrayLengthTrampoline; }
    202218        MacroAssemblerCodePtr ctiStringLengthTrampoline() { return m_ctiStringLengthTrampoline; }
    203219        MacroAssemblerCodePtr ctiVirtualCallPreLink() { return m_ctiVirtualCallPreLink; }
     
    209225        RefPtr<ExecutablePool> m_executablePool;
    210226
    211         MacroAssemblerCodePtr m_ctiArrayLengthTrampoline;
    212227        MacroAssemblerCodePtr m_ctiStringLengthTrampoline;
    213228        MacroAssemblerCodePtr m_ctiVirtualCallPreLink;
     
    217232    };
    218233
    219 namespace JITStubs { extern "C" {
    220 
     234extern "C" {
     235    EncodedJSValue JIT_STUB cti_op_add(STUB_ARGS_DECLARATION);
     236    EncodedJSValue JIT_STUB cti_op_bitand(STUB_ARGS_DECLARATION);
     237    EncodedJSValue JIT_STUB cti_op_bitnot(STUB_ARGS_DECLARATION);
     238    EncodedJSValue JIT_STUB cti_op_bitor(STUB_ARGS_DECLARATION);
     239    EncodedJSValue JIT_STUB cti_op_bitxor(STUB_ARGS_DECLARATION);
     240    EncodedJSValue JIT_STUB cti_op_call_NotJSFunction(STUB_ARGS_DECLARATION);
     241    EncodedJSValue JIT_STUB cti_op_call_eval(STUB_ARGS_DECLARATION);
     242    EncodedJSValue JIT_STUB cti_op_construct_NotJSConstruct(STUB_ARGS_DECLARATION);
     243    EncodedJSValue JIT_STUB cti_op_convert_this(STUB_ARGS_DECLARATION);
     244    EncodedJSValue JIT_STUB cti_op_del_by_id(STUB_ARGS_DECLARATION);
     245    EncodedJSValue JIT_STUB cti_op_del_by_val(STUB_ARGS_DECLARATION);
     246    EncodedJSValue JIT_STUB cti_op_div(STUB_ARGS_DECLARATION);
     247    EncodedJSValue JIT_STUB cti_op_get_by_id(STUB_ARGS_DECLARATION);
     248    EncodedJSValue JIT_STUB cti_op_get_by_id_array_fail(STUB_ARGS_DECLARATION);
     249    EncodedJSValue JIT_STUB cti_op_get_by_id_generic(STUB_ARGS_DECLARATION);
     250    EncodedJSValue JIT_STUB cti_op_get_by_id_method_check(STUB_ARGS_DECLARATION);
     251    EncodedJSValue JIT_STUB cti_op_get_by_id_method_check_second(STUB_ARGS_DECLARATION);
     252    EncodedJSValue JIT_STUB cti_op_get_by_id_proto_fail(STUB_ARGS_DECLARATION);
     253    EncodedJSValue JIT_STUB cti_op_get_by_id_proto_list(STUB_ARGS_DECLARATION);
     254    EncodedJSValue JIT_STUB cti_op_get_by_id_proto_list_full(STUB_ARGS_DECLARATION);
     255    EncodedJSValue JIT_STUB cti_op_get_by_id_second(STUB_ARGS_DECLARATION);
     256    EncodedJSValue JIT_STUB cti_op_get_by_id_self_fail(STUB_ARGS_DECLARATION);
     257    EncodedJSValue JIT_STUB cti_op_get_by_id_string_fail(STUB_ARGS_DECLARATION);
     258    EncodedJSValue JIT_STUB cti_op_get_by_val(STUB_ARGS_DECLARATION);
     259    EncodedJSValue JIT_STUB cti_op_get_by_val_byte_array(STUB_ARGS_DECLARATION);
     260    EncodedJSValue JIT_STUB cti_op_get_by_val_string(STUB_ARGS_DECLARATION);
     261    EncodedJSValue JIT_STUB cti_op_in(STUB_ARGS_DECLARATION);
     262    EncodedJSValue JIT_STUB cti_op_instanceof(STUB_ARGS_DECLARATION);
     263    EncodedJSValue JIT_STUB cti_op_is_boolean(STUB_ARGS_DECLARATION);
     264    EncodedJSValue JIT_STUB cti_op_is_function(STUB_ARGS_DECLARATION);
     265    EncodedJSValue JIT_STUB cti_op_is_number(STUB_ARGS_DECLARATION);
     266    EncodedJSValue JIT_STUB cti_op_is_object(STUB_ARGS_DECLARATION);
     267    EncodedJSValue JIT_STUB cti_op_is_string(STUB_ARGS_DECLARATION);
     268    EncodedJSValue JIT_STUB cti_op_is_undefined(STUB_ARGS_DECLARATION);
     269    EncodedJSValue JIT_STUB cti_op_less(STUB_ARGS_DECLARATION);
     270    EncodedJSValue JIT_STUB cti_op_lesseq(STUB_ARGS_DECLARATION);
     271    EncodedJSValue JIT_STUB cti_op_lshift(STUB_ARGS_DECLARATION);
     272    EncodedJSValue JIT_STUB cti_op_mod(STUB_ARGS_DECLARATION);
     273    EncodedJSValue JIT_STUB cti_op_mul(STUB_ARGS_DECLARATION);
     274    EncodedJSValue JIT_STUB cti_op_negate(STUB_ARGS_DECLARATION);
     275    EncodedJSValue JIT_STUB cti_op_next_pname(STUB_ARGS_DECLARATION);
     276    EncodedJSValue JIT_STUB cti_op_not(STUB_ARGS_DECLARATION);
     277    EncodedJSValue JIT_STUB cti_op_nstricteq(STUB_ARGS_DECLARATION);
     278    EncodedJSValue JIT_STUB cti_op_post_dec(STUB_ARGS_DECLARATION);
     279    EncodedJSValue JIT_STUB cti_op_post_inc(STUB_ARGS_DECLARATION);
     280    EncodedJSValue JIT_STUB cti_op_pre_dec(STUB_ARGS_DECLARATION);
     281    EncodedJSValue JIT_STUB cti_op_pre_inc(STUB_ARGS_DECLARATION);
     282    EncodedJSValue JIT_STUB cti_op_resolve(STUB_ARGS_DECLARATION);
     283    EncodedJSValue JIT_STUB cti_op_resolve_base(STUB_ARGS_DECLARATION);
     284    EncodedJSValue JIT_STUB cti_op_resolve_global(STUB_ARGS_DECLARATION);
     285    EncodedJSValue JIT_STUB cti_op_resolve_skip(STUB_ARGS_DECLARATION);
     286    EncodedJSValue JIT_STUB cti_op_resolve_with_base(STUB_ARGS_DECLARATION);
     287    EncodedJSValue JIT_STUB cti_op_rshift(STUB_ARGS_DECLARATION);
     288    EncodedJSValue JIT_STUB cti_op_strcat(STUB_ARGS_DECLARATION);
     289    EncodedJSValue JIT_STUB cti_op_stricteq(STUB_ARGS_DECLARATION);
     290    EncodedJSValue JIT_STUB cti_op_sub(STUB_ARGS_DECLARATION);
     291    EncodedJSValue JIT_STUB cti_op_throw(STUB_ARGS_DECLARATION);
     292    EncodedJSValue JIT_STUB cti_op_to_jsnumber(STUB_ARGS_DECLARATION);
     293    EncodedJSValue JIT_STUB cti_op_to_primitive(STUB_ARGS_DECLARATION);
     294    EncodedJSValue JIT_STUB cti_op_typeof(STUB_ARGS_DECLARATION);
     295    EncodedJSValue JIT_STUB cti_op_urshift(STUB_ARGS_DECLARATION);
     296    EncodedJSValue JIT_STUB cti_vm_throw(STUB_ARGS_DECLARATION);
     297    JSObject* JIT_STUB cti_op_construct_JSConstruct(STUB_ARGS_DECLARATION);
     298    JSObject* JIT_STUB cti_op_new_array(STUB_ARGS_DECLARATION);
     299    JSObject* JIT_STUB cti_op_new_error(STUB_ARGS_DECLARATION);
     300    JSObject* JIT_STUB cti_op_new_func(STUB_ARGS_DECLARATION);
     301    JSObject* JIT_STUB cti_op_new_func_exp(STUB_ARGS_DECLARATION);
     302    JSObject* JIT_STUB cti_op_new_object(STUB_ARGS_DECLARATION);
     303    JSObject* JIT_STUB cti_op_new_regexp(STUB_ARGS_DECLARATION);
     304    JSObject* JIT_STUB cti_op_push_activation(STUB_ARGS_DECLARATION);
     305    JSObject* JIT_STUB cti_op_push_new_scope(STUB_ARGS_DECLARATION);
     306    JSObject* JIT_STUB cti_op_push_scope(STUB_ARGS_DECLARATION);
     307    JSObject* JIT_STUB cti_op_put_by_id_transition_realloc(STUB_ARGS_DECLARATION);
     308    JSPropertyNameIterator* JIT_STUB cti_op_get_pnames(STUB_ARGS_DECLARATION);
     309    VoidPtrPair JIT_STUB cti_op_call_arityCheck(STUB_ARGS_DECLARATION);
     310    bool JIT_STUB cti_op_eq(STUB_ARGS_DECLARATION);
     311#if USE(JSVALUE32_64)
     312    bool JIT_STUB cti_op_eq_strings(STUB_ARGS_DECLARATION);
     313#endif
     314    int JIT_STUB cti_op_jless(STUB_ARGS_DECLARATION);
     315    int JIT_STUB cti_op_jlesseq(STUB_ARGS_DECLARATION);
     316    int JIT_STUB cti_op_jtrue(STUB_ARGS_DECLARATION);
     317    int JIT_STUB cti_op_load_varargs(STUB_ARGS_DECLARATION);
     318    int JIT_STUB cti_op_loop_if_less(STUB_ARGS_DECLARATION);
     319    int JIT_STUB cti_op_loop_if_lesseq(STUB_ARGS_DECLARATION);
     320    int JIT_STUB cti_op_loop_if_true(STUB_ARGS_DECLARATION);
     321    int JIT_STUB cti_timeout_check(STUB_ARGS_DECLARATION);
    221322    void JIT_STUB cti_op_create_arguments(STUB_ARGS_DECLARATION);
    222323    void JIT_STUB cti_op_create_arguments_no_params(STUB_ARGS_DECLARATION);
     
    241342    void JIT_STUB cti_op_tear_off_arguments(STUB_ARGS_DECLARATION);
    242343    void JIT_STUB cti_register_file_check(STUB_ARGS_DECLARATION);
    243     int JIT_STUB cti_op_jless(STUB_ARGS_DECLARATION);
    244     int JIT_STUB cti_op_jlesseq(STUB_ARGS_DECLARATION);
    245     int JIT_STUB cti_op_jtrue(STUB_ARGS_DECLARATION);
    246     int JIT_STUB cti_op_load_varargs(STUB_ARGS_DECLARATION);
    247     int JIT_STUB cti_op_loop_if_less(STUB_ARGS_DECLARATION);
    248     int JIT_STUB cti_op_loop_if_lesseq(STUB_ARGS_DECLARATION);
    249     int JIT_STUB cti_op_loop_if_true(STUB_ARGS_DECLARATION);
    250     int JIT_STUB cti_timeout_check(STUB_ARGS_DECLARATION);
    251344    void* JIT_STUB cti_op_call_JSFunction(STUB_ARGS_DECLARATION);
    252345    void* JIT_STUB cti_op_switch_char(STUB_ARGS_DECLARATION);
     
    255348    void* JIT_STUB cti_vm_dontLazyLinkCall(STUB_ARGS_DECLARATION);
    256349    void* JIT_STUB cti_vm_lazyLinkCall(STUB_ARGS_DECLARATION);
    257     JSObject* JIT_STUB cti_op_construct_JSConstruct(STUB_ARGS_DECLARATION);
    258     JSObject* JIT_STUB cti_op_convert_this(STUB_ARGS_DECLARATION);
    259     JSObject* JIT_STUB cti_op_new_array(STUB_ARGS_DECLARATION);
    260     JSObject* JIT_STUB cti_op_new_error(STUB_ARGS_DECLARATION);
    261     JSObject* JIT_STUB cti_op_new_func(STUB_ARGS_DECLARATION);
    262     JSObject* JIT_STUB cti_op_new_func_exp(STUB_ARGS_DECLARATION);
    263     JSObject* JIT_STUB cti_op_new_object(STUB_ARGS_DECLARATION);
    264     JSObject* JIT_STUB cti_op_new_regexp(STUB_ARGS_DECLARATION);
    265     JSObject* JIT_STUB cti_op_push_activation(STUB_ARGS_DECLARATION);
    266     JSObject* JIT_STUB cti_op_push_new_scope(STUB_ARGS_DECLARATION);
    267     JSObject* JIT_STUB cti_op_push_scope(STUB_ARGS_DECLARATION);
    268     JSPropertyNameIterator* JIT_STUB cti_op_get_pnames(STUB_ARGS_DECLARATION);
    269     EncodedJSValue JIT_STUB cti_op_add(STUB_ARGS_DECLARATION);
    270     EncodedJSValue JIT_STUB cti_op_bitand(STUB_ARGS_DECLARATION);
    271     EncodedJSValue JIT_STUB cti_op_bitnot(STUB_ARGS_DECLARATION);
    272     EncodedJSValue JIT_STUB cti_op_bitor(STUB_ARGS_DECLARATION);
    273     EncodedJSValue JIT_STUB cti_op_bitxor(STUB_ARGS_DECLARATION);
    274     EncodedJSValue JIT_STUB cti_op_call_NotJSFunction(STUB_ARGS_DECLARATION);
    275     EncodedJSValue JIT_STUB cti_op_call_eval(STUB_ARGS_DECLARATION);
    276     EncodedJSValue JIT_STUB cti_op_construct_NotJSConstruct(STUB_ARGS_DECLARATION);
    277     EncodedJSValue JIT_STUB cti_op_del_by_id(STUB_ARGS_DECLARATION);
    278     EncodedJSValue JIT_STUB cti_op_del_by_val(STUB_ARGS_DECLARATION);
    279     EncodedJSValue JIT_STUB cti_op_div(STUB_ARGS_DECLARATION);
    280     EncodedJSValue JIT_STUB cti_op_eq(STUB_ARGS_DECLARATION);
    281     EncodedJSValue JIT_STUB cti_op_get_by_id(STUB_ARGS_DECLARATION);
    282     EncodedJSValue JIT_STUB cti_op_get_by_id_method_check(STUB_ARGS_DECLARATION);
    283     EncodedJSValue JIT_STUB cti_op_get_by_id_method_check_second(STUB_ARGS_DECLARATION);
    284     EncodedJSValue JIT_STUB cti_op_get_by_id_array_fail(STUB_ARGS_DECLARATION);
    285     EncodedJSValue JIT_STUB cti_op_get_by_id_generic(STUB_ARGS_DECLARATION);
    286     EncodedJSValue JIT_STUB cti_op_get_by_id_proto_fail(STUB_ARGS_DECLARATION);
    287     EncodedJSValue JIT_STUB cti_op_get_by_id_proto_list(STUB_ARGS_DECLARATION);
    288     EncodedJSValue JIT_STUB cti_op_get_by_id_proto_list_full(STUB_ARGS_DECLARATION);
    289     EncodedJSValue JIT_STUB cti_op_get_by_id_second(STUB_ARGS_DECLARATION);
    290     EncodedJSValue JIT_STUB cti_op_get_by_id_self_fail(STUB_ARGS_DECLARATION);
    291     EncodedJSValue JIT_STUB cti_op_get_by_id_string_fail(STUB_ARGS_DECLARATION);
    292     EncodedJSValue JIT_STUB cti_op_get_by_val(STUB_ARGS_DECLARATION);
    293     EncodedJSValue JIT_STUB cti_op_get_by_val_byte_array(STUB_ARGS_DECLARATION);
    294     EncodedJSValue JIT_STUB cti_op_get_by_val_string(STUB_ARGS_DECLARATION);
    295     EncodedJSValue JIT_STUB cti_op_put_by_id_transition_realloc(STUB_ARGS_DECLARATION);
    296     EncodedJSValue JIT_STUB cti_op_in(STUB_ARGS_DECLARATION);
    297     EncodedJSValue JIT_STUB cti_op_instanceof(STUB_ARGS_DECLARATION);
    298     EncodedJSValue JIT_STUB cti_op_is_boolean(STUB_ARGS_DECLARATION);
    299     EncodedJSValue JIT_STUB cti_op_is_function(STUB_ARGS_DECLARATION);
    300     EncodedJSValue JIT_STUB cti_op_is_number(STUB_ARGS_DECLARATION);
    301     EncodedJSValue JIT_STUB cti_op_is_object(STUB_ARGS_DECLARATION);
    302     EncodedJSValue JIT_STUB cti_op_is_string(STUB_ARGS_DECLARATION);
    303     EncodedJSValue JIT_STUB cti_op_is_undefined(STUB_ARGS_DECLARATION);
    304     EncodedJSValue JIT_STUB cti_op_less(STUB_ARGS_DECLARATION);
    305     EncodedJSValue JIT_STUB cti_op_lesseq(STUB_ARGS_DECLARATION);
    306     EncodedJSValue JIT_STUB cti_op_lshift(STUB_ARGS_DECLARATION);
    307     EncodedJSValue JIT_STUB cti_op_mod(STUB_ARGS_DECLARATION);
    308     EncodedJSValue JIT_STUB cti_op_mul(STUB_ARGS_DECLARATION);
    309     EncodedJSValue JIT_STUB cti_op_negate(STUB_ARGS_DECLARATION);
    310     EncodedJSValue JIT_STUB cti_op_neq(STUB_ARGS_DECLARATION);
    311     EncodedJSValue JIT_STUB cti_op_next_pname(STUB_ARGS_DECLARATION);
    312     EncodedJSValue JIT_STUB cti_op_not(STUB_ARGS_DECLARATION);
    313     EncodedJSValue JIT_STUB cti_op_nstricteq(STUB_ARGS_DECLARATION);
    314     EncodedJSValue JIT_STUB cti_op_pre_dec(STUB_ARGS_DECLARATION);
    315     EncodedJSValue JIT_STUB cti_op_pre_inc(STUB_ARGS_DECLARATION);
    316     EncodedJSValue JIT_STUB cti_op_resolve(STUB_ARGS_DECLARATION);
    317     EncodedJSValue JIT_STUB cti_op_resolve_base(STUB_ARGS_DECLARATION);
    318     EncodedJSValue JIT_STUB cti_op_resolve_global(STUB_ARGS_DECLARATION);
    319     EncodedJSValue JIT_STUB cti_op_resolve_skip(STUB_ARGS_DECLARATION);
    320     EncodedJSValue JIT_STUB cti_op_rshift(STUB_ARGS_DECLARATION);
    321     EncodedJSValue JIT_STUB cti_op_strcat(STUB_ARGS_DECLARATION);
    322     EncodedJSValue JIT_STUB cti_op_stricteq(STUB_ARGS_DECLARATION);
    323     EncodedJSValue JIT_STUB cti_op_sub(STUB_ARGS_DECLARATION);
    324     EncodedJSValue JIT_STUB cti_op_throw(STUB_ARGS_DECLARATION);
    325     EncodedJSValue JIT_STUB cti_op_to_jsnumber(STUB_ARGS_DECLARATION);
    326     EncodedJSValue JIT_STUB cti_op_to_primitive(STUB_ARGS_DECLARATION);
    327     EncodedJSValue JIT_STUB cti_op_typeof(STUB_ARGS_DECLARATION);
    328     EncodedJSValue JIT_STUB cti_op_urshift(STUB_ARGS_DECLARATION);
    329     EncodedJSValue JIT_STUB cti_vm_throw(STUB_ARGS_DECLARATION);
    330     EncodedJSValue JIT_STUB cti_op_post_dec(STUB_ARGS_DECLARATION);
    331     EncodedJSValue JIT_STUB cti_op_post_inc(STUB_ARGS_DECLARATION);
    332     EncodedJSValue JIT_STUB cti_op_resolve_func(STUB_ARGS_DECLARATION);
    333     EncodedJSValue JIT_STUB cti_op_resolve_with_base(STUB_ARGS_DECLARATION);
    334     VoidPtrPair JIT_STUB cti_op_call_arityCheck(STUB_ARGS_DECLARATION);
    335 
    336 }; } // extern "C" namespace JITStubs
     350} // extern "C"
    337351
    338352} // namespace JSC
  • trunk/JavaScriptCore/jsc.cpp

    r46431 r46598  
    444444}
    445445
    446 static
    447 #if !HAVE(READLINE)
    448 NO_RETURN
    449 #endif
    450 void runInteractive(GlobalObject* globalObject)
     446#define RUNNING_FROM_XCODE 0
     447
     448static void runInteractive(GlobalObject* globalObject)
    451449{
    452450    while (true) {
    453 #if HAVE(READLINE)
     451#if HAVE(READLINE) && !RUNNING_FROM_XCODE
    454452        char* line = readline(interactivePrompt);
    455453        if (!line)
     
    460458        free(line);
    461459#else
    462         puts(interactivePrompt);
     460        printf("%s", interactivePrompt);
    463461        Vector<char, 256> line;
    464462        int c;
     
    469467            line.append(c);
    470468        }
     469        if (line.isEmpty())
     470            break;
    471471        line.append('\0');
    472472        Completion completion = evaluate(globalObject->globalExec(), globalObject->globalScopeChain(), makeSource(line.data(), interpreterName));
  • trunk/JavaScriptCore/parser/Nodes.cpp

    r45904 r46598  
    356356    int identifierStart = divot() - startOffset();
    357357    generator.emitExpressionInfo(identifierStart + m_ident.size(), m_ident.size(), 0);
    358     generator.emitResolveFunction(thisRegister.get(), func.get(), m_ident);
     358    generator.emitResolveWithBase(thisRegister.get(), func.get(), m_ident);
    359359    return generator.emitCall(generator.finalDestination(dst, func.get()), func.get(), thisRegister.get(), m_args, divot(), startOffset(), endOffset());
    360360}
     
    376376RegisterID* FunctionCallDotNode::emitBytecode(BytecodeGenerator& generator, RegisterID* dst)
    377377{
    378     RefPtr<RegisterID> base = generator.emitNode(m_base);
     378    RefPtr<RegisterID> function = generator.tempDestination(dst);
     379    RefPtr<RegisterID> thisRegister = generator.newTemporary();
     380    generator.emitNode(thisRegister.get(), m_base);
    379381    generator.emitExpressionInfo(divot() - m_subexpressionDivotOffset, startOffset() - m_subexpressionDivotOffset, m_subexpressionEndOffset);
    380382    generator.emitMethodCheck();
    381     RefPtr<RegisterID> function = generator.emitGetById(generator.tempDestination(dst), base.get(), m_ident);
    382     RefPtr<RegisterID> thisRegister = generator.emitMove(generator.newTemporary(), base.get());
     383    generator.emitGetById(function.get(), thisRegister.get(), m_ident);
    383384    return generator.emitCall(generator.finalDestination(dst, function.get()), function.get(), thisRegister.get(), m_args, divot(), startOffset(), endOffset());
    384385}
     
    13741375RegisterID* ForNode::emitBytecode(BytecodeGenerator& generator, RegisterID* dst)
    13751376{
    1376     if (dst == generator.ignoredResult())
    1377         dst = 0;
    1378 
    13791377    RefPtr<LabelScope> scope = generator.newLabelScope(LabelScope::Loop);
    13801378
     
    15641562        if (clauseExpression->isNumber()) {
    15651563            double value = static_cast<NumberNode*>(clauseExpression)->value();
    1566             JSValue jsValue = JSValue::makeInt32Fast(static_cast<int32_t>(value));
    1567             if ((typeForTable & ~SwitchNumber) || !jsValue || (jsValue.getInt32Fast() != value)) {
     1564            int32_t intVal = static_cast<int32_t>(value);
     1565            if ((typeForTable & ~SwitchNumber) || (intVal != value)) {
    15681566                typeForTable = SwitchNeither;
    15691567                break;
    15701568            }
    1571             int32_t intVal = static_cast<int32_t>(value);
    1572             ASSERT(intVal == value);
    15731569            if (intVal < min_num)
    15741570                min_num = intVal;
     
    17411737RegisterID* TryNode::emitBytecode(BytecodeGenerator& generator, RegisterID* dst)
    17421738{
     1739    // NOTE: The catch and finally blocks must be labeled explicitly, so the
     1740    // optimizer knows they may be jumped to from anywhere.
     1741
    17431742    generator.emitDebugHook(WillExecuteStatement, firstLine(), lastLine());
    17441743
    17451744    RefPtr<Label> tryStartLabel = generator.newLabel();
    1746     RefPtr<Label> tryEndLabel = generator.newLabel();
    17471745    RefPtr<Label> finallyStart;
    17481746    RefPtr<RegisterID> finallyReturnAddr;
     
    17521750        generator.pushFinallyContext(finallyStart.get(), finallyReturnAddr.get());
    17531751    }
     1752
    17541753    generator.emitLabel(tryStartLabel.get());
    17551754    generator.emitNode(dst, m_tryBlock);
    1756     generator.emitLabel(tryEndLabel.get());
    17571755
    17581756    if (m_catchBlock) {
    1759         RefPtr<Label> handlerEndLabel = generator.newLabel();
    1760         generator.emitJump(handlerEndLabel.get());
    1761         RefPtr<RegisterID> exceptionRegister = generator.emitCatch(generator.newTemporary(), tryStartLabel.get(), tryEndLabel.get());
     1757        RefPtr<Label> catchEndLabel = generator.newLabel();
     1758       
     1759        // Normal path: jump over the catch block.
     1760        generator.emitJump(catchEndLabel.get());
     1761
     1762        // Uncaught exception path: the catch block.
     1763        RefPtr<Label> here = generator.emitLabel(generator.newLabel().get());
     1764        RefPtr<RegisterID> exceptionRegister = generator.emitCatch(generator.newTemporary(), tryStartLabel.get(), here.get());
    17621765        if (m_catchHasEval) {
    17631766            RefPtr<RegisterID> dynamicScopeObject = generator.emitNewObject(generator.newTemporary());
     
    17691772        generator.emitNode(dst, m_catchBlock);
    17701773        generator.emitPopScope();
    1771         generator.emitLabel(handlerEndLabel.get());
     1774        generator.emitLabel(catchEndLabel.get());
    17721775    }
    17731776
     
    17801783        RefPtr<RegisterID> highestUsedRegister = generator.highestUsedRegister();
    17811784        RefPtr<Label> finallyEndLabel = generator.newLabel();
     1785
     1786        // Normal path: invoke the finally block, then jump over it.
    17821787        generator.emitJumpSubroutine(finallyReturnAddr.get(), finallyStart.get());
    1783         // Use a label to record the subtle fact that sret will return to the
    1784         // next instruction. sret is the only way to jump without an explicit label.
    1785         generator.emitLabel(generator.newLabel().get());
    17861788        generator.emitJump(finallyEndLabel.get());
    17871789
    1788         // Finally block for exception path
    1789         RefPtr<RegisterID> tempExceptionRegister = generator.emitCatch(generator.newTemporary(), tryStartLabel.get(), generator.emitLabel(generator.newLabel().get()).get());
     1790        // Uncaught exception path: invoke the finally block, then re-throw the exception.
     1791        RefPtr<Label> here = generator.emitLabel(generator.newLabel().get());
     1792        RefPtr<RegisterID> tempExceptionRegister = generator.emitCatch(generator.newTemporary(), tryStartLabel.get(), here.get());
    17901793        generator.emitJumpSubroutine(finallyReturnAddr.get(), finallyStart.get());
    1791         // Use a label to record the subtle fact that sret will return to the
    1792         // next instruction. sret is the only way to jump without an explicit label.
    1793         generator.emitLabel(generator.newLabel().get());
    17941794        generator.emitThrow(tempExceptionRegister.get());
    17951795
    1796         // emit the finally block itself
     1796        // The finally block.
    17971797        generator.emitLabel(finallyStart.get());
    17981798        generator.emitNode(dst, m_finallyBlock);
  • trunk/JavaScriptCore/runtime/ArgList.h

    r45891 r46598  
    2323#define ArgList_h
    2424
    25 #include "JSImmediate.h"
    2625#include "Register.h"
    2726
  • trunk/JavaScriptCore/runtime/Arguments.h

    r44224 r46598  
    231231    }
    232232
     233    ALWAYS_INLINE Arguments* Register::arguments() const
     234    {
     235        if (jsValue() == JSValue())
     236            return 0;
     237        return asArguments(jsValue());
     238    }
     239   
     240
    233241} // namespace JSC
    234242
  • trunk/JavaScriptCore/runtime/Collector.cpp

    r46431 r46598  
    11831183    if (cell->isString())
    11841184        return "string";
     1185#if USE(JSVALUE32)
    11851186    if (cell->isNumber())
    11861187        return "number";
     1188#endif
    11871189    if (cell->isGetterSetter())
    11881190        return "gettersetter";
  • trunk/JavaScriptCore/runtime/Collector.h

    r45891 r46598  
    168168
    169169    // cell size needs to be a power of two for certain optimizations in collector.cpp
    170     template<> struct CellSize<sizeof(uint32_t)> { static const size_t m_value = 32; }; // 32-bit
    171     template<> struct CellSize<sizeof(uint64_t)> { static const size_t m_value = 64; }; // 64-bit
     170#if USE(JSVALUE32)
     171    template<> struct CellSize<sizeof(uint32_t)> { static const size_t m_value = 32; };
     172#else
     173    template<> struct CellSize<sizeof(uint32_t)> { static const size_t m_value = 64; };
     174#endif
     175    template<> struct CellSize<sizeof(uint64_t)> { static const size_t m_value = 64; };
     176
    172177    const size_t BLOCK_SIZE = 16 * 4096; // 64k
    173178
  • trunk/JavaScriptCore/runtime/ExceptionHelpers.h

    r46528 r46598  
    3030#define ExceptionHelpers_h
    3131
    32 #include "JSImmediate.h"
    3332
    3433namespace JSC {
  • trunk/JavaScriptCore/runtime/InitializeThreading.cpp

    r44508 r46598  
    3030#include "InitializeThreading.h"
    3131
    32 #include "JSImmediate.h"
    3332#include "Collector.h"
    3433#include "dtoa.h"
  • trunk/JavaScriptCore/runtime/JSArray.cpp

    r46180 r46598  
    135135
    136136    m_storage = static_cast<ArrayStorage*>(fastZeroedMalloc(storageSize(initialCapacity)));
     137    m_storage->m_vectorLength = initialCapacity;
     138
    137139    m_fastAccessCutoff = 0;
    138     m_storage->m_vectorLength = initialCapacity;
    139     m_storage->m_length = 0;
    140140
    141141    checkConsistency();
     
    147147    unsigned initialCapacity = min(initialLength, MIN_SPARSE_ARRAY_INDEX);
    148148
    149     m_storage = static_cast<ArrayStorage*>(fastZeroedMalloc(storageSize(initialCapacity)));
     149    m_storage = static_cast<ArrayStorage*>(fastMalloc(storageSize(initialCapacity)));
     150    m_storage->m_length = initialLength;
     151    m_storage->m_vectorLength = initialCapacity;
     152    m_storage->m_numValuesInVector = 0;
     153    m_storage->m_sparseValueMap = 0;
     154    m_storage->lazyCreationData = 0;
     155
     156    JSValue* vector = m_storage->m_vector;
     157    for (size_t i = 0; i < initialCapacity; ++i)
     158        vector[i] = JSValue();
     159
    150160    m_fastAccessCutoff = 0;
    151     m_storage->m_vectorLength = initialCapacity;
    152     m_storage->m_length = initialLength;
     161
     162    checkConsistency();
    153163
    154164    Heap::heap(this)->reportExtraMemoryCost(initialCapacity * sizeof(JSValue));
    155 
    156     checkConsistency();
    157165}
    158166
     
    160168    : JSObject(structure)
    161169{
    162     unsigned length = list.size();
    163 
    164     m_fastAccessCutoff = length;
    165 
    166     ArrayStorage* storage = static_cast<ArrayStorage*>(fastMalloc(storageSize(length)));
    167 
    168     storage->m_vectorLength = length;
    169     storage->m_numValuesInVector = length;
    170     storage->m_sparseValueMap = 0;
    171     storage->m_length = length;
     170    unsigned initialCapacity = list.size();
     171
     172    m_storage = static_cast<ArrayStorage*>(fastMalloc(storageSize(initialCapacity)));
     173    m_storage->m_length = initialCapacity;
     174    m_storage->m_vectorLength = initialCapacity;
     175    m_storage->m_numValuesInVector = initialCapacity;
     176    m_storage->m_sparseValueMap = 0;
    172177
    173178    size_t i = 0;
    174179    ArgList::const_iterator end = list.end();
    175180    for (ArgList::const_iterator it = list.begin(); it != end; ++it, ++i)
    176         storage->m_vector[i] = *it;
    177 
    178     m_storage = storage;
    179 
    180     Heap::heap(this)->reportExtraMemoryCost(storageSize(length));
    181 
    182     checkConsistency();
     181        m_storage->m_vector[i] = *it;
     182
     183    m_fastAccessCutoff = initialCapacity;
     184
     185    checkConsistency();
     186
     187    Heap::heap(this)->reportExtraMemoryCost(storageSize(initialCapacity));
    183188}
    184189
  • trunk/JavaScriptCore/runtime/JSCell.cpp

    r43153 r46598  
    9191}
    9292
    93 bool JSCell::getTruncatedInt32(int32_t&) const
    94 {
    95     return false;
    96 }
    97 
    98 bool JSCell::getTruncatedUInt32(uint32_t&) const
    99 {
    100     return false;
    101 }
    102 
    10393bool JSCell::getString(UString&stringValue) const
    10494{
  • trunk/JavaScriptCore/runtime/JSCell.h

    r46528 r46598  
    4141        friend class JSString;
    4242        friend class JSValue;
     43        friend class JSAPIValueWrapper;
    4344        friend struct VPtrSet;
    4445
     
    4950    public:
    5051        // Querying the type.
     52#if USE(JSVALUE32)
    5153        bool isNumber() const;
     54#endif
    5255        bool isString() const;
    5356        bool isObject() const;
    5457        virtual bool isGetterSetter() const;
    5558        virtual bool isObject(const ClassInfo*) const;
     59        virtual bool isAPIValueWrapper() const { return false; }
    5660
    5761        Structure* structure() const;
     
    6973        // FIXME: remove these methods, can check isNumberCell in JSValue && then call asNumberCell::*.
    7074        virtual bool getUInt32(uint32_t&) const;
    71         virtual bool getTruncatedInt32(int32_t&) const;
    72         virtual bool getTruncatedUInt32(uint32_t&) const;
    7375
    7476        // Basic conversions.
     
    125127    }
    126128
     129#if USE(JSVALUE32)
    127130    inline bool JSCell::isNumber() const
    128131    {
    129132        return Heap::isNumber(const_cast<JSCell*>(this));
    130133    }
     134#endif
    131135
    132136    inline bool JSCell::isObject() const
     
    153157    {
    154158        return Heap::markCell(this);
    155     }
    156 
    157     ALWAYS_INLINE JSCell* JSValue::asCell() const
    158     {
    159         ASSERT(isCell());
    160         return m_ptr;
    161159    }
    162160
     
    174172    inline bool JSValue::isString() const
    175173    {
    176         return !JSImmediate::isImmediate(asValue()) && asCell()->isString();
     174        return isCell() && asCell()->isString();
    177175    }
    178176
    179177    inline bool JSValue::isGetterSetter() const
    180178    {
    181         return !JSImmediate::isImmediate(asValue()) && asCell()->isGetterSetter();
     179        return isCell() && asCell()->isGetterSetter();
    182180    }
    183181
    184182    inline bool JSValue::isObject() const
    185183    {
    186         return !JSImmediate::isImmediate(asValue()) && asCell()->isObject();
     184        return isCell() && asCell()->isObject();
    187185    }
    188186
    189187    inline bool JSValue::getString(UString& s) const
    190188    {
    191         return !JSImmediate::isImmediate(asValue()) && asCell()->getString(s);
     189        return isCell() && asCell()->getString(s);
    192190    }
    193191
    194192    inline UString JSValue::getString() const
    195193    {
    196         return JSImmediate::isImmediate(asValue()) ? UString() : asCell()->getString();
     194        return isCell() ? asCell()->getString() : UString();
    197195    }
    198196
    199197    inline JSObject* JSValue::getObject() const
    200198    {
    201         return JSImmediate::isImmediate(asValue()) ? 0 : asCell()->getObject();
     199        return isCell() ? asCell()->getObject() : 0;
    202200    }
    203201
    204202    inline CallType JSValue::getCallData(CallData& callData)
    205203    {
    206         return JSImmediate::isImmediate(asValue()) ? CallTypeNone : asCell()->getCallData(callData);
     204        return isCell() ? asCell()->getCallData(callData) : CallTypeNone;
    207205    }
    208206
    209207    inline ConstructType JSValue::getConstructData(ConstructData& constructData)
    210208    {
    211         return JSImmediate::isImmediate(asValue()) ? ConstructTypeNone : asCell()->getConstructData(constructData);
     209        return isCell() ? asCell()->getConstructData(constructData) : ConstructTypeNone;
    212210    }
    213211
    214212    ALWAYS_INLINE bool JSValue::getUInt32(uint32_t& v) const
    215213    {
    216         return JSImmediate::isImmediate(asValue()) ? JSImmediate::getUInt32(asValue(), v) : asCell()->getUInt32(v);
    217     }
    218 
    219     ALWAYS_INLINE bool JSValue::getTruncatedInt32(int32_t& v) const
    220     {
    221         return JSImmediate::isImmediate(asValue()) ? JSImmediate::getTruncatedInt32(asValue(), v) : asCell()->getTruncatedInt32(v);
    222     }
    223 
    224     inline bool JSValue::getTruncatedUInt32(uint32_t& v) const
    225     {
    226         return JSImmediate::isImmediate(asValue()) ? JSImmediate::getTruncatedUInt32(asValue(), v) : asCell()->getTruncatedUInt32(v);
     214        if (isInt32()) {
     215            int32_t i = asInt32();
     216            v = static_cast<uint32_t>(i);
     217            return i >= 0;
     218        }
     219        if (isDouble()) {
     220            double d = asDouble();
     221            v = static_cast<uint32_t>(d);
     222            return v == d;
     223        }
     224        return false;
    227225    }
    228226
     
    234232    inline bool JSValue::marked() const
    235233    {
    236         return JSImmediate::isImmediate(asValue()) || asCell()->marked();
    237     }
     234        return !isCell() || asCell()->marked();
     235    }
     236
     237#if !USE(JSVALUE32_64)
     238    ALWAYS_INLINE JSCell* JSValue::asCell() const
     239    {
     240        ASSERT(isCell());
     241        return m_ptr;
     242    }
     243#endif // !USE(JSVALUE32_64)
    238244
    239245    inline JSValue JSValue::toPrimitive(ExecState* exec, PreferredPrimitiveType preferredType) const
    240246    {
    241         return JSImmediate::isImmediate(asValue()) ? asValue() : asCell()->toPrimitive(exec, preferredType);
     247        return isCell() ? asCell()->toPrimitive(exec, preferredType) : asValue();
    242248    }
    243249
    244250    inline bool JSValue::getPrimitiveNumber(ExecState* exec, double& number, JSValue& value)
    245251    {
    246         if (JSImmediate::isImmediate(asValue())) {
    247             number = JSImmediate::toDouble(asValue());
    248             value = asValue();
    249             return true;
    250         }
    251         return asCell()->getPrimitiveNumber(exec, number, value);
     252        if (isInt32()) {
     253            number = asInt32();
     254            value = *this;
     255            return true;
     256        }
     257        if (isDouble()) {
     258            number = asDouble();
     259            value = *this;
     260            return true;
     261        }
     262        if (isCell())
     263            return asCell()->getPrimitiveNumber(exec, number, value);
     264        if (isTrue()) {
     265            number = 1.0;
     266            value = *this;
     267            return true;
     268        }
     269        if (isFalse() || isNull()) {
     270            number = 0.0;
     271            value = *this;
     272            return true;
     273        }
     274        ASSERT(isUndefined());
     275        number = nonInlineNaN();
     276        value = *this;
     277        return true;
    252278    }
    253279
    254280    inline bool JSValue::toBoolean(ExecState* exec) const
    255281    {
    256         return JSImmediate::isImmediate(asValue()) ? JSImmediate::toBoolean(asValue()) : asCell()->toBoolean(exec);
     282        if (isInt32())
     283            return asInt32() != 0;
     284        if (isDouble())
     285            return asDouble() > 0.0 || asDouble() < 0.0; // false for NaN
     286        if (isCell())
     287            return asCell()->toBoolean(exec);
     288        return isTrue(); // false, null, and undefined all convert to false.
    257289    }
    258290
    259291    ALWAYS_INLINE double JSValue::toNumber(ExecState* exec) const
    260292    {
    261         return JSImmediate::isImmediate(asValue()) ? JSImmediate::toDouble(asValue()) : asCell()->toNumber(exec);
     293        if (isInt32())
     294            return asInt32();
     295        if (isDouble())
     296            return asDouble();
     297        if (isCell())
     298            return asCell()->toNumber(exec);
     299        if (isTrue())
     300            return 1.0;
     301        return isUndefined() ? nonInlineNaN() : 0; // null and false both convert to 0.
    262302    }
    263303
    264304    inline UString JSValue::toString(ExecState* exec) const
    265305    {
    266         return JSImmediate::isImmediate(asValue()) ? JSImmediate::toString(asValue()) : asCell()->toString(exec);
     306        if (isCell())
     307            return asCell()->toString(exec);
     308        if (isInt32())
     309            return UString::from(asInt32());
     310        if (isDouble())
     311            return asDouble() == 0.0 ? "0" : UString::from(asDouble());
     312        if (isTrue())
     313            return "true";
     314        if (isFalse())
     315            return "false";
     316        if (isNull())
     317            return "null";
     318        ASSERT(isUndefined());
     319        return "undefined";
     320    }
     321
     322    inline bool JSValue::needsThisConversion() const
     323    {
     324        if (UNLIKELY(!isCell()))
     325            return true;
     326        return asCell()->structure()->typeInfo().needsThisConversion();
     327    }
     328
     329    inline UString JSValue::toThisString(ExecState* exec) const
     330    {
     331        return isCell() ? asCell()->toThisString(exec) : toString(exec);
     332    }
     333
     334    inline JSValue JSValue::getJSNumber()
     335    {
     336        if (isInt32() || isDouble())
     337            return *this;
     338        if (isCell())
     339            return asCell()->getJSNumber();
     340        return JSValue();
    267341    }
    268342
    269343    inline JSObject* JSValue::toObject(ExecState* exec) const
    270344    {
    271         return JSImmediate::isImmediate(asValue()) ? JSImmediate::toObject(asValue(), exec) : asCell()->toObject(exec);
     345        return isCell() ? asCell()->toObject(exec) : toObjectSlowCase(exec);
    272346    }
    273347
    274348    inline JSObject* JSValue::toThisObject(ExecState* exec) const
    275349    {
    276         if (UNLIKELY(JSImmediate::isImmediate(asValue())))
    277             return JSImmediate::toThisObject(asValue(), exec);
    278         return asCell()->toThisObject(exec);
    279     }
    280 
    281     inline bool JSValue::needsThisConversion() const
    282     {
    283         if (UNLIKELY(JSImmediate::isImmediate(asValue())))
    284             return true;
    285         return asCell()->structure()->typeInfo().needsThisConversion();
    286     }
    287 
    288     inline UString JSValue::toThisString(ExecState* exec) const
    289     {
    290         return JSImmediate::isImmediate(asValue()) ? JSImmediate::toString(asValue()) : asCell()->toThisString(exec);
    291     }
    292 
    293     inline JSValue JSValue::getJSNumber()
    294     {
    295         return JSImmediate::isNumber(asValue()) ? asValue() : JSImmediate::isImmediate(asValue()) ? JSValue() : asCell()->getJSNumber();
     350        return isCell() ? asCell()->toThisObject(exec) : toThisObjectSlowCase(exec);
    296351    }
    297352
  • trunk/JavaScriptCore/runtime/JSFunction.cpp

    r44862 r46598  
    7373JSFunction::~JSFunction()
    7474{
    75 #if ENABLE(JIT)
    7675    // JIT code for other functions may have had calls linked directly to the code for this function; these links
    7776    // are based on a check for the this pointer value for this JSFunction - which will no longer be valid once
    7877    // this memory is freed and may be reused (potentially for another, different JSFunction).
     78#if ENABLE(JIT_OPTIMIZE_CALL)
    7979    if (m_body && m_body->isGenerated())
    8080        m_body->generatedBytecode().unlinkCallers();
    8181#endif
    8282    if (!isHostFunction())
    83         scopeChain().~ScopeChain();
     83        scopeChain().~ScopeChain(); // FIXME: Don't we need to do this in the interpreter too?
    8484}
    8585
  • trunk/JavaScriptCore/runtime/JSGlobalData.cpp

    r45553 r46598  
    119119    , notAnObjectErrorStubStructure(JSNotAnObjectErrorStub::createStructure(jsNull()))
    120120    , notAnObjectStructure(JSNotAnObject::createStructure(jsNull()))
    121 #if !USE(ALTERNATE_JSIMMEDIATE)
     121#if USE(JSVALUE32)
    122122    , numberStructure(JSNumberCell::createStructure(jsNull()))
    123123#endif
  • trunk/JavaScriptCore/runtime/JSGlobalData.h

    r46528 r46598  
    9898        RefPtr<Structure> notAnObjectErrorStubStructure;
    9999        RefPtr<Structure> notAnObjectStructure;
    100 #if !USE(ALTERNATE_JSIMMEDIATE)
     100#if USE(JSVALUE32)
    101101        RefPtr<Structure> numberStructure;
    102102#endif
  • trunk/JavaScriptCore/runtime/JSGlobalObject.h

    r45891 r46598  
    348348            return m_prototype;
    349349
     350#if USE(JSVALUE32)
    350351        if (typeInfo().type() == StringType)
    351352            return exec->lexicalGlobalObject()->stringPrototype();
     
    353354        ASSERT(typeInfo().type() == NumberType);
    354355        return exec->lexicalGlobalObject()->numberPrototype();
     356#else
     357        ASSERT(typeInfo().type() == StringType);
     358        return exec->lexicalGlobalObject()->stringPrototype();
     359#endif
    355360    }
    356361
  • trunk/JavaScriptCore/runtime/JSGlobalObjectFunctions.cpp

    r44923 r46598  
    304304    int32_t radix = args.at(1).toInt32(exec);
    305305
    306     if (value.isNumber() && (radix == 0 || radix == 10)) {
    307         if (value.isInt32Fast())
    308             return value;
    309         double d = value.uncheckedGetNumber();
     306    if (radix != 0 && radix != 10)
     307        return jsNumber(exec, parseInt(value.toString(exec), radix));
     308
     309    if (value.isInt32())
     310        return value;
     311
     312    if (value.isDouble()) {
     313        double d = value.asDouble();
    310314        if (isfinite(d))
    311315            return jsNumber(exec, (d > 0) ? floor(d) : ceil(d));
    312316        if (isnan(d) || isinf(d))
    313             return jsNaN(&exec->globalData());
     317            return jsNaN(exec);
    314318        return jsNumber(exec, 0);
    315319    }
  • trunk/JavaScriptCore/runtime/JSImmediate.cpp

    r43122 r46598  
    2121#include "config.h"
    2222#include "JSImmediate.h"
     23
     24#if !USE(JSVALUE32_64)
    2325
    2426#include "BooleanConstructor.h"
     
    7072}
    7173
    72 UString JSImmediate::toString(JSValue v)
    73 {
    74     ASSERT(isImmediate(v));
    75     if (isIntegerNumber(v))
    76         return UString::from(getTruncatedInt32(v));
    77 #if USE(ALTERNATE_JSIMMEDIATE)
    78     if (isNumber(v)) {
    79         ASSERT(isDoubleNumber(v));
    80         double value = doubleValue(v);
    81         if (value == 0.0) // +0.0 or -0.0
    82             return "0";
    83         return UString::from(value);
    84     }
    85 #else
    86         ASSERT(!isNumber(v));
    87 #endif
    88     if (jsBoolean(false) == v)
    89         return "false";
    90     if (jsBoolean(true) == v)
    91         return "true";
    92     if (v.isNull())
    93         return "null";
    94     ASSERT(v.isUndefined());
    95     return "undefined";
    96 }
     74} // namespace JSC
    9775
    98 NEVER_INLINE double JSImmediate::nonInlineNaN()
    99 {
    100     return std::numeric_limits<double>::quiet_NaN();
    101 }
    102 
    103 } // namespace JSC
     76#endif // !USE(JSVALUE32_64)
  • trunk/JavaScriptCore/runtime/JSImmediate.h

    r43153 r46598  
    2323#define JSImmediate_h
    2424
     25#include <wtf/Platform.h>
     26
     27#if !USE(JSVALUE32_64)
     28
    2529#include <wtf/Assertions.h>
    2630#include <wtf/AlwaysInline.h>
     
    4347    class UString;
    4448
    45 #if USE(ALTERNATE_JSIMMEDIATE)
     49#if USE(JSVALUE64)
    4650    inline intptr_t reinterpretDoubleToIntptr(double value)
    4751    {
     
    99103    /*
    100104     * On 64-bit platforms, we support an alternative encoding form for immediates, if
    101      * USE(ALTERNATE_JSIMMEDIATE) is defined.  When this format is used, double precision
     105     * USE(JSVALUE64) is defined.  When this format is used, double precision
    102106     * floating point values may also be encoded as JSImmediates.
    103107     *
     
    156160        friend JSValue jsNumber(JSGlobalData* globalData, unsigned long long i);
    157161
    158 #if USE(ALTERNATE_JSIMMEDIATE)
     162#if USE(JSVALUE64)
    159163        // If all bits in the mask are set, this indicates an integer number,
    160164        // if any but not all are set this value is a double precision number.
     
    178182        static const intptr_t FullTagTypeNull      = TagBitTypeOther;
    179183
    180 #if USE(ALTERNATE_JSIMMEDIATE)
     184#if USE(JSVALUE64)
    181185        static const int32_t IntegerPayloadShift  = 0;
    182186#else
     
    201205        static ALWAYS_INLINE bool isIntegerNumber(JSValue v)
    202206        {
    203 #if USE(ALTERNATE_JSIMMEDIATE)
     207#if USE(JSVALUE64)
    204208            return (rawValue(v) & TagTypeNumber) == TagTypeNumber;
    205209#else
     
    208212        }
    209213
    210 #if USE(ALTERNATE_JSIMMEDIATE)
    211         static ALWAYS_INLINE bool isDoubleNumber(JSValue v)
     214#if USE(JSVALUE64)
     215        static ALWAYS_INLINE bool isDouble(JSValue v)
    212216        {
    213217            return isNumber(v) && !isIntegerNumber(v);
     
    257261        static ALWAYS_INLINE bool areBothImmediateIntegerNumbers(JSValue v1, JSValue v2)
    258262        {
    259 #if USE(ALTERNATE_JSIMMEDIATE)
     263#if USE(JSVALUE64)
    260264            return (rawValue(v1) & rawValue(v2) & TagTypeNumber) == TagTypeNumber;
    261265#else
     
    268272        static JSObject* toObject(JSValue, ExecState*);
    269273        static JSObject* toThisObject(JSValue, ExecState*);
    270         static UString toString(JSValue);
    271274
    272275        static bool getUInt32(JSValue, uint32_t&);
     
    287290
    288291    private:
    289 #if USE(ALTERNATE_JSIMMEDIATE)
     292#if USE(JSVALUE64)
    290293        static const int minImmediateInt = ((-INT_MAX) - 1);
    291294        static const int maxImmediateInt = INT_MAX;
     
    301304        }
    302305
    303         // With USE(ALTERNATE_JSIMMEDIATE) we want the argument to be zero extended, so the
     306        // With USE(JSVALUE64) we want the argument to be zero extended, so the
    304307        // integer doesn't interfere with the tag bits in the upper word.  In the default encoding,
    305308        // if intptr_t id larger then int32_t we sign extend the value through the upper word.
    306 #if USE(ALTERNATE_JSIMMEDIATE)
     309#if USE(JSVALUE64)
    307310        static ALWAYS_INLINE JSValue makeInt(uint32_t value)
    308311#else
     
    313316        }
    314317       
    315 #if USE(ALTERNATE_JSIMMEDIATE)
     318#if USE(JSVALUE64)
    316319        static ALWAYS_INLINE JSValue makeDouble(double value)
    317320        {
     
    338341        static JSValue fromNumberOutsideIntegerRange(T);
    339342
    340 #if USE(ALTERNATE_JSIMMEDIATE)
     343#if USE(JSVALUE64)
    341344        static ALWAYS_INLINE double doubleValue(JSValue v)
    342345        {
     
    364367            return v.immediateValue();
    365368        }
    366 
    367         static double nonInlineNaN();
    368369    };
    369370
     
    375376    ALWAYS_INLINE JSValue JSImmediate::oneImmediate() { return makeInt(1); }
    376377
    377 #if USE(ALTERNATE_JSIMMEDIATE)
     378#if USE(JSVALUE64)
    378379    inline bool doubleToBoolean(double value)
    379380    {
     
    402403    }
    403404
    404 #if USE(ALTERNATE_JSIMMEDIATE)
     405#if USE(JSVALUE64)
    405406    template<typename T>
    406407    inline JSValue JSImmediate::fromNumberOutsideIntegerRange(T value)
     
    443444    ALWAYS_INLINE JSValue JSImmediate::from(int i)
    444445    {
    445 #if !USE(ALTERNATE_JSIMMEDIATE)
     446#if !USE(JSVALUE64)
    446447        if ((i < minImmediateInt) | (i > maxImmediateInt))
    447448            return fromNumberOutsideIntegerRange(i);
     
    509510            return intValue(v);
    510511
    511 #if USE(ALTERNATE_JSIMMEDIATE)
     512#if USE(JSVALUE64)
    512513        if (isNumber(v)) {
    513             ASSERT(isDoubleNumber(v));
     514            ASSERT(isDouble(v));
    514515            return doubleValue(v);
    515516        }
     
    542543    }
    543544
    544     // These are identical logic to the JSValue functions above, and faster than jsNumber(number).toInt32().
    545     int32_t toInt32(double);
    546     uint32_t toUInt32(double);
    547     int32_t toInt32SlowCase(double, bool& ok);
    548     uint32_t toUInt32SlowCase(double, bool& ok);
    549 
    550545    inline JSValue::JSValue(JSNullTag)
    551546    {
     
    576571    {
    577572        return JSImmediate::isBoolean(asValue());
     573    }
     574
     575    inline bool JSValue::isTrue() const
     576    {
     577        return asValue() == JSImmediate::trueImmediate();
     578    }
     579
     580    inline bool JSValue::isFalse() const
     581    {
     582        return asValue() == JSImmediate::falseImmediate();
    578583    }
    579584
     
    593598    }
    594599
    595     ALWAYS_INLINE int32_t JSValue::toInt32(ExecState* exec) const
    596     {
    597         int32_t i;
    598         if (getTruncatedInt32(i))
    599             return i;
    600         bool ignored;
    601         return toInt32SlowCase(toNumber(exec), ignored);
    602     }
    603 
    604     inline uint32_t JSValue::toUInt32(ExecState* exec) const
    605     {
    606         uint32_t i;
    607         if (getTruncatedUInt32(i))
    608             return i;
    609         bool ignored;
    610         return toUInt32SlowCase(toNumber(exec), ignored);
    611     }
    612 
    613     inline int32_t toInt32(double val)
    614     {
    615         if (!(val >= -2147483648.0 && val < 2147483648.0)) {
    616             bool ignored;
    617             return toInt32SlowCase(val, ignored);
    618         }
    619         return static_cast<int32_t>(val);
    620     }
    621 
    622     inline uint32_t toUInt32(double val)
    623     {
    624         if (!(val >= 0.0 && val < 4294967296.0)) {
    625             bool ignored;
    626             return toUInt32SlowCase(val, ignored);
    627         }
    628         return static_cast<uint32_t>(val);
    629     }
    630 
    631     inline int32_t JSValue::toInt32(ExecState* exec, bool& ok) const
    632     {
    633         int32_t i;
    634         if (getTruncatedInt32(i)) {
    635             ok = true;
    636             return i;
    637         }
    638         return toInt32SlowCase(toNumber(exec), ok);
    639     }
    640 
    641     inline uint32_t JSValue::toUInt32(ExecState* exec, bool& ok) const
    642     {
    643         uint32_t i;
    644         if (getTruncatedUInt32(i)) {
    645             ok = true;
    646             return i;
    647         }
    648         return toUInt32SlowCase(toNumber(exec), ok);
    649     }
    650 
    651600    inline bool JSValue::isCell() const
    652601    {
     
    654603    }
    655604
    656     inline bool JSValue::isInt32Fast() const
     605    inline bool JSValue::isInt32() const
    657606    {
    658607        return JSImmediate::isIntegerNumber(asValue());
    659608    }
    660609
    661     inline int32_t JSValue::getInt32Fast() const
    662     {
    663         ASSERT(isInt32Fast());
     610    inline int32_t JSValue::asInt32() const
     611    {
     612        ASSERT(isInt32());
    664613        return JSImmediate::getTruncatedInt32(asValue());
    665614    }
    666615
    667     inline bool JSValue::isUInt32Fast() const
     616    inline bool JSValue::isUInt32() const
    668617    {
    669618        return JSImmediate::isPositiveIntegerNumber(asValue());
    670619    }
    671620
    672     inline uint32_t JSValue::getUInt32Fast() const
    673     {
    674         ASSERT(isUInt32Fast());
     621    inline uint32_t JSValue::asUInt32() const
     622    {
     623        ASSERT(isUInt32());
    675624        return JSImmediate::getTruncatedUInt32(asValue());
    676     }
    677 
    678     inline JSValue JSValue::makeInt32Fast(int32_t i)
    679     {
    680         return JSImmediate::from(i);
    681     }
    682 
    683     inline bool JSValue::areBothInt32Fast(JSValue v1, JSValue v2)
    684     {
    685         return JSImmediate::areBothImmediateIntegerNumbers(v1, v2);
    686625    }
    687626
     
    736675        {
    737676            ASSERT(canDoFastRshift(val, shift) || canDoFastUrshift(val, shift));
    738 #if USE(ALTERNATE_JSIMMEDIATE)
     677#if USE(JSVALUE64)
    739678            return JSImmediate::makeValue(static_cast<intptr_t>(static_cast<uint32_t>(static_cast<int32_t>(JSImmediate::rawValue(val)) >> ((JSImmediate::rawValue(shift) >> JSImmediate::IntegerPayloadShift) & 0x1f))) | JSImmediate::TagTypeNumber);
    740679#else
     
    784723} // namespace JSC
    785724
     725#endif // !USE(JSVALUE32_64)
     726
    786727#endif // JSImmediate_h
  • trunk/JavaScriptCore/runtime/JSNumberCell.cpp

    r43165 r46598  
    2424#include "JSNumberCell.h"
    2525
     26#if USE(JSVALUE32)
     27
    2628#include "NumberObject.h"
    2729#include "UString.h"
    2830
    2931namespace JSC {
    30 
    31 #if !USE(ALTERNATE_JSIMMEDIATE)
    3232
    3333JSValue JSNumberCell::toPrimitive(ExecState*, PreferredPrimitiveType) const
     
    8383}
    8484
    85 bool JSNumberCell::getTruncatedInt32(int32_t& int32) const
    86 {
    87     if (!(m_value >= -2147483648.0 && m_value < 2147483648.0))
    88         return false;
    89     int32 = static_cast<int32_t>(m_value);
    90     return true;
    91 }
    92 
    93 bool JSNumberCell::getTruncatedUInt32(uint32_t& uint32) const
    94 {
    95     if (!(m_value >= 0.0 && m_value < 4294967296.0))
    96         return false;
    97     uint32 = static_cast<uint32_t>(m_value);
    98     return true;
    99 }
    100 
    10185JSValue JSNumberCell::getJSNumber()
    10286{
     
    11498}
    11599
    116 JSValue jsAPIMangledNumber(ExecState* exec, double d)
    117 {
    118     return new (exec) JSNumberCell(JSNumberCell::APIMangled, d);
    119 }
     100} // namespace JSC
    120101
    121 #else
     102#else // USE(JSVALUE32)
     103
     104// Keep our exported symbols lists happy.
     105namespace JSC {
     106
     107JSValue jsNumberCell(ExecState*, double);
    122108
    123109JSValue jsNumberCell(ExecState*, double)
     
    127113}
    128114
    129 JSValue jsAPIMangledNumber(ExecState*, double)
    130 {
    131     ASSERT_NOT_REACHED();
    132     return JSValue();
    133 }
     115} // namespace JSC
    134116
    135 #endif
    136 
    137 } // namespace JSC
     117#endif // USE(JSVALUE32)
  • trunk/JavaScriptCore/runtime/JSNumberCell.h

    r43165 r46598  
    3636    extern const double Inf;
    3737
     38#if USE(JSVALUE32)
    3839    JSValue jsNumberCell(ExecState*, double);
    39     JSValue jsAPIMangledNumber(ExecState*, double);
    40 
    41 #if !USE(ALTERNATE_JSIMMEDIATE)
    4240
    4341    class Identifier;
     
    5452        friend JSValue jsNumberCell(JSGlobalData*, double);
    5553        friend JSValue jsNumberCell(ExecState*, double);
    56         friend JSValue jsAPIMangledNumber(ExecState*, double);
     54
    5755    public:
    5856        double value() const { return m_value; }
     
    6866        virtual JSObject* toThisObject(ExecState*) const;
    6967        virtual JSValue getJSNumber();
    70 
    71         static const uintptr_t JSAPIMangledMagicNumber = 0xbbadbeef;
    72         bool isAPIMangledNumber() const { return m_structure == reinterpret_cast<Structure*>(JSAPIMangledMagicNumber); }
    7368
    7469        void* operator new(size_t size, ExecState* exec)
     
    105100        }
    106101
    107         enum APIMangledTag { APIMangled };
    108         JSNumberCell(APIMangledTag, double value)
    109             : JSCell(reinterpret_cast<Structure*>(JSAPIMangledMagicNumber))
    110             , m_value(value)
    111         {
    112         }
    113 
    114102        virtual bool getUInt32(uint32_t&) const;
    115         virtual bool getTruncatedInt32(int32_t&) const;
    116         virtual bool getTruncatedUInt32(uint32_t&) const;
    117103
    118104        double m_value;
     
    132118    }
    133119
    134 
    135120    inline JSValue::JSValue(ExecState* exec, double d)
    136121    {
     
    193178    }
    194179
    195     inline JSValue::JSValue(JSGlobalData* globalData, long i)
    196     {
    197         JSValue v = JSImmediate::from(i);
    198         *this = v ? v : jsNumberCell(globalData, i);
    199     }
    200 
    201     inline JSValue::JSValue(JSGlobalData* globalData, unsigned long i)
    202     {
    203         JSValue v = JSImmediate::from(i);
    204         *this = v ? v : jsNumberCell(globalData, i);
    205     }
    206 
    207     inline JSValue::JSValue(JSGlobalData* globalData, long long i)
    208     {
    209         JSValue v = JSImmediate::from(i);
    210         *this = v ? v : jsNumberCell(globalData, static_cast<double>(i));
    211     }
    212 
    213     inline JSValue::JSValue(JSGlobalData* globalData, unsigned long long i)
    214     {
    215         JSValue v = JSImmediate::from(i);
    216         *this = v ? v : jsNumberCell(globalData, static_cast<double>(i));
    217     }
    218 
    219     inline bool JSValue::isDoubleNumber() const
     180    inline bool JSValue::isDouble() const
    220181    {
    221182        return isNumberCell(asValue());
    222183    }
    223184
    224     inline double JSValue::getDoubleNumber() const
     185    inline double JSValue::asDouble() const
    225186    {
    226187        return asNumberCell(asValue())->value();
     
    229190    inline bool JSValue::isNumber() const
    230191    {
    231         return JSImmediate::isNumber(asValue()) || isDoubleNumber();
     192        return JSImmediate::isNumber(asValue()) || isDouble();
    232193    }
    233194
     
    235196    {
    236197        ASSERT(isNumber());
    237         return JSImmediate::isImmediate(asValue()) ? JSImmediate::toDouble(asValue()) : getDoubleNumber();
    238     }
    239 
    240     inline bool JSValue::isAPIMangledNumber()
    241     {
    242         ASSERT(isNumber());
    243         return JSImmediate::isImmediate(asValue()) ? false : asNumberCell(asValue())->isAPIMangledNumber();
    244     }
    245 
    246 #else
    247 
     198        return JSImmediate::isImmediate(asValue()) ? JSImmediate::toDouble(asValue()) : asDouble();
     199    }
     200
     201#endif // USE(JSVALUE32)
     202
     203#if USE(JSVALUE64)
    248204    inline JSValue::JSValue(ExecState*, double d)
    249205    {
     
    316272    }
    317273
    318     inline JSValue::JSValue(JSGlobalData*, long i)
    319     {
    320         JSValue v = JSImmediate::from(i);
    321         ASSERT(v);
    322         *this = v;
    323     }
    324 
    325     inline JSValue::JSValue(JSGlobalData*, unsigned long i)
    326     {
    327         JSValue v = JSImmediate::from(i);
    328         ASSERT(v);
    329         *this = v;
    330     }
    331 
    332     inline JSValue::JSValue(JSGlobalData*, long long i)
    333     {
    334         JSValue v = JSImmediate::from(static_cast<double>(i));
    335         ASSERT(v);
    336         *this = v;
    337     }
    338 
    339     inline JSValue::JSValue(JSGlobalData*, unsigned long long i)
    340     {
    341         JSValue v = JSImmediate::from(static_cast<double>(i));
    342         ASSERT(v);
    343         *this = v;
    344     }
    345 
    346     inline bool JSValue::isDoubleNumber() const
    347     {
    348         return JSImmediate::isDoubleNumber(asValue());
    349     }
    350 
    351     inline double JSValue::getDoubleNumber() const
     274    inline bool JSValue::isDouble() const
     275    {
     276        return JSImmediate::isDouble(asValue());
     277    }
     278
     279    inline double JSValue::asDouble() const
    352280    {
    353281        return JSImmediate::doubleValue(asValue());
     
    365293    }
    366294
    367 #endif
     295#endif // USE(JSVALUE64)
     296
     297#if USE(JSVALUE32) || USE(JSVALUE64)
    368298
    369299    inline JSValue::JSValue(ExecState*, char i)
     
    391321    }
    392322
    393     inline JSValue::JSValue(JSGlobalData*, char i)
    394     {
    395         ASSERT(JSImmediate::from(i));
    396         *this = JSImmediate::from(i);
    397     }
    398 
    399     inline JSValue::JSValue(JSGlobalData*, unsigned char i)
    400     {
    401         ASSERT(JSImmediate::from(i));
    402         *this = JSImmediate::from(i);
    403     }
    404 
    405     inline JSValue::JSValue(JSGlobalData*, short i)
    406     {
    407         ASSERT(JSImmediate::from(i));
    408         *this = JSImmediate::from(i);
    409     }
    410 
    411     inline JSValue::JSValue(JSGlobalData*, unsigned short i)
    412     {
    413         ASSERT(JSImmediate::from(i));
    414         *this = JSImmediate::from(i);
    415     }
    416 
    417323    inline JSValue jsNaN(ExecState* exec)
    418324    {
     
    434340    inline bool JSValue::getNumber(double &result) const
    435341    {
    436         if (isInt32Fast())
    437             result = getInt32Fast();
    438         else if (LIKELY(isDoubleNumber()))
    439             result = getDoubleNumber();
     342        if (isInt32())
     343            result = asInt32();
     344        else if (LIKELY(isDouble()))
     345            result = asDouble();
    440346        else {
    441347            ASSERT(!isNumber());
     
    445351    }
    446352
    447     inline bool JSValue::numberToInt32(int32_t& arg)
    448     {
    449         if (isInt32Fast())
    450             arg = getInt32Fast();
    451         else if (LIKELY(isDoubleNumber()))
    452             arg = JSC::toInt32(getDoubleNumber());
    453         else {
    454             ASSERT(!isNumber());
    455             return false;
    456         }
    457         return true;
    458     }
    459 
    460     inline bool JSValue::numberToUInt32(uint32_t& arg)
    461     {
    462         if (isUInt32Fast())
    463             arg = getUInt32Fast();
    464         else if (LIKELY(isDoubleNumber()))
    465             arg = JSC::toUInt32(getDoubleNumber());
    466         else if (isInt32Fast()) {
    467             // FIXME: I think this case can be merged with the uint case; toUInt32SlowCase
    468             // on a negative value is equivalent to simple static_casting.
    469             bool ignored;
    470             arg = toUInt32SlowCase(getInt32Fast(), ignored);
    471         } else {
    472             ASSERT(!isNumber());
    473             return false;
    474         }
    475         return true;
    476     }
     353#endif // USE(JSVALUE32) || USE(JSVALUE64)
    477354
    478355} // namespace JSC
  • trunk/JavaScriptCore/runtime/JSObject.h

    r46528 r46598  
    3434#include "Structure.h"
    3535#include "JSGlobalData.h"
     36#include <wtf/StdLibExtras.h>
    3637
    3738namespace JSC {
     
    196197        bool isUsingInlineStorage() const { return m_structure->isUsingInlineStorage(); }
    197198
    198         static const size_t inlineStorageCapacity = 3;
     199        static const size_t inlineStorageCapacity = sizeof(EncodedJSValue) == 2 * sizeof(void*) ? 4 : 3;
    199200        static const size_t nonInlineBaseStorageCapacity = 16;
    200201
     
    226227        const HashEntry* findPropertyHashEntry(ExecState*, const Identifier& propertyName) const;
    227228        Structure* createInheritorID();
    228 
    229         RefPtr<Structure> m_inheritorID;
    230229
    231230        union {
     
    233232            EncodedJSValue m_inlineStorage[inlineStorageCapacity];
    234233        };
     234
     235        RefPtr<Structure> m_inheritorID;
    235236    };
    236 
    237     JSObject* asObject(JSValue);
    238 
    239     JSObject* constructEmptyObject(ExecState*);
     237   
     238JSObject* constructEmptyObject(ExecState*);
    240239
    241240inline JSObject* asObject(JSValue value)
     
    252251    ASSERT(m_structure->isEmpty());
    253252    ASSERT(prototype().isNull() || Heap::heap(this) == Heap::heap(prototype()));
     253#if USE(JSVALUE64) || USE(JSVALUE32_64)
     254    ASSERT(OBJECT_OFFSETOF(JSObject, m_inlineStorage) % sizeof(double) == 0);
     255#endif
    254256}
    255257
     
    543545{
    544546    if (UNLIKELY(!isCell())) {
    545         JSObject* prototype = JSImmediate::prototype(asValue(), exec);
     547        JSObject* prototype = synthesizePrototype(exec);
    546548        if (propertyName == exec->propertyNames().underscoreProto)
    547549            return prototype;
     
    571573{
    572574    if (UNLIKELY(!isCell())) {
    573         JSObject* prototype = JSImmediate::prototype(asValue(), exec);
     575        JSObject* prototype = synthesizePrototype(exec);
    574576        if (!prototype->getPropertySlot(exec, propertyName, slot))
    575577            return jsUndefined();
     
    591593{
    592594    if (UNLIKELY(!isCell())) {
    593         JSImmediate::toObject(asValue(), exec)->put(exec, propertyName, value, slot);
     595        synthesizeObject(exec)->put(exec, propertyName, value, slot);
    594596        return;
    595597    }
     
    600602{
    601603    if (UNLIKELY(!isCell())) {
    602         JSImmediate::toObject(asValue(), exec)->put(exec, propertyName, value);
     604        synthesizeObject(exec)->put(exec, propertyName, value);
    603605        return;
    604606    }
  • trunk/JavaScriptCore/runtime/JSString.h

    r46528 r46598  
    2424#define JSString_h
    2525
     26#include "CallFrame.h"
    2627#include "CommonIdentifiers.h"
    27 #include "CallFrame.h"
    2828#include "Identifier.h"
    2929#include "JSNumberCell.h"
     
    209209    inline JSString* JSValue::toThisJSString(ExecState* exec)
    210210    {
    211         return JSImmediate::isImmediate(asValue()) ? jsString(exec, JSImmediate::toString(asValue())) : asCell()->toThisJSString(exec);
     211        return isCell() ? asCell()->toThisJSString(exec) : jsString(exec, toString(exec));
    212212    }
    213213
  • trunk/JavaScriptCore/runtime/JSValue.cpp

    r43122 r46598  
    2424#include "JSValue.h"
    2525
     26#include "BooleanConstructor.h"
     27#include "BooleanPrototype.h"
     28#include "ExceptionHelpers.h"
     29#include "JSGlobalObject.h"
    2630#include "JSFunction.h"
     31#include "JSNotAnObject.h"
     32#include "NumberObject.h"
    2733#include <wtf/MathExtras.h>
     34#include <wtf/StringExtras.h>
    2835
    2936namespace JSC {
     
    3441double JSValue::toInteger(ExecState* exec) const
    3542{
    36     if (isInt32Fast())
    37         return getInt32Fast();
     43    if (isInt32())
     44        return asInt32();
    3845    double d = toNumber(exec);
    3946    return isnan(d) ? 0.0 : trunc(d);
     
    4249double JSValue::toIntegerPreserveNaN(ExecState* exec) const
    4350{
    44     if (isInt32Fast())
    45         return getInt32Fast();
     51    if (isInt32())
     52        return asInt32();
    4653    return trunc(toNumber(exec));
    4754}
     55
     56JSObject* JSValue::toObjectSlowCase(ExecState* exec) const
     57{
     58    ASSERT(!isCell());
     59
     60    if (isInt32() || isDouble())
     61        return constructNumber(exec, asValue());
     62    if (isTrue() || isFalse())
     63        return constructBooleanFromImmediateBoolean(exec, asValue());
     64    ASSERT(isUndefinedOrNull());
     65    JSNotAnObjectErrorStub* exception = createNotAnObjectErrorStub(exec, isNull());
     66    exec->setException(exception);
     67    return new (exec) JSNotAnObject(exec, exception);
     68}
     69
     70JSObject* JSValue::toThisObjectSlowCase(ExecState* exec) const
     71{
     72    ASSERT(!isCell());
     73
     74    if (isInt32() || isDouble())
     75        return constructNumber(exec, asValue());
     76    if (isTrue() || isFalse())
     77        return constructBooleanFromImmediateBoolean(exec, asValue());
     78    ASSERT(isUndefinedOrNull());
     79    return exec->globalThisValue();
     80}
     81
     82JSObject* JSValue::synthesizeObject(ExecState* exec) const
     83{
     84    ASSERT(!isCell());
     85    if (isNumber())
     86        return constructNumber(exec, asValue());
     87    if (isBoolean())
     88        return constructBooleanFromImmediateBoolean(exec, asValue());
     89   
     90    JSNotAnObjectErrorStub* exception = createNotAnObjectErrorStub(exec, isNull());
     91    exec->setException(exception);
     92    return new (exec) JSNotAnObject(exec, exception);
     93}
     94
     95JSObject* JSValue::synthesizePrototype(ExecState* exec) const
     96{
     97    ASSERT(!isCell());
     98    if (isNumber())
     99        return exec->lexicalGlobalObject()->numberPrototype();
     100    if (isBoolean())
     101        return exec->lexicalGlobalObject()->booleanPrototype();
     102
     103    JSNotAnObjectErrorStub* exception = createNotAnObjectErrorStub(exec, isNull());
     104    exec->setException(exception);
     105    return new (exec) JSNotAnObject(exec, exception);
     106}
     107
     108#ifndef NDEBUG
     109char* JSValue::description()
     110{
     111    static const size_t size = 32;
     112    static char description[size];
     113    if (isInt32())
     114        snprintf(description, size, "Int32: %d", asInt32());
     115    else if (isDouble())
     116        snprintf(description, size, "Double: %lf", asDouble());
     117    else if (isCell())
     118        snprintf(description, size, "Cell: %p", asCell());
     119    else if (isTrue())
     120        snprintf(description, size, "True");
     121    else if (isFalse())
     122        snprintf(description, size, "False");
     123    else if (isNull())
     124        snprintf(description, size, "Null");
     125    else {
     126        ASSERT(isUndefined());
     127        snprintf(description, size, "Undefined");
     128    }
     129
     130    return description;
     131}
     132#endif
    48133
    49134int32_t toInt32SlowCase(double d, bool& ok)
     
    85170}
    86171
     172NEVER_INLINE double nonInlineNaN()
     173{
     174    return std::numeric_limits<double>::quiet_NaN();
     175}
     176
    87177} // namespace JSC
  • trunk/JavaScriptCore/runtime/JSValue.h

    r43160 r46598  
    2929#include "CallData.h"
    3030#include "ConstructData.h"
     31#include <math.h>
     32#include <wtf/AlwaysInline.h>
     33#include <wtf/Assertions.h>
    3134#include <wtf/HashTraits.h>
    32 #include <wtf/AlwaysInline.h>
     35#include <wtf/MathExtras.h>
    3336
    3437namespace JSC {
     
    4952    enum PreferredPrimitiveType { NoPreference, PreferNumber, PreferString };
    5053
     54#if USE(JSVALUE32_64)
     55    typedef int64_t EncodedJSValue;
     56#else
    5157    typedef void* EncodedJSValue;
     58#endif
     59
     60    double nonInlineNaN();
     61    int32_t toInt32SlowCase(double, bool& ok);
     62    uint32_t toUInt32SlowCase(double, bool& ok);
    5263
    5364    class JSValue {
    5465        friend class JSImmediate;
    55         friend struct JSValueHashTraits;
    56 
    57         static JSValue makeImmediate(intptr_t value)
    58         {
    59             return JSValue(reinterpret_cast<JSCell*>(value));
    60         }
    61 
    62         intptr_t immediateValue()
    63         {
    64             return reinterpret_cast<intptr_t>(m_ptr);
    65         }
    66        
     66        friend struct EncodedJSValueHashTraits;
     67        friend class JIT;
     68        friend class JITStubs;
     69        friend class JITStubCall;
     70
    6771    public:
     72        static EncodedJSValue encode(JSValue value);
     73        static JSValue decode(EncodedJSValue ptr);
     74#if !USE(JSVALUE32_64)
     75    private:
     76        static JSValue makeImmediate(intptr_t value);
     77        intptr_t immediateValue();
     78    public:
     79#endif
    6880        enum JSNullTag { JSNull };
    6981        enum JSUndefinedTag { JSUndefined };
    7082        enum JSTrueTag { JSTrue };
    7183        enum JSFalseTag { JSFalse };
    72 
    73         static EncodedJSValue encode(JSValue value);
    74         static JSValue decode(EncodedJSValue ptr);
    7584
    7685        JSValue();
     
    95104        JSValue(ExecState*, unsigned long long);
    96105        JSValue(JSGlobalData*, double);
    97         JSValue(JSGlobalData*, char);
    98         JSValue(JSGlobalData*, unsigned char);
    99         JSValue(JSGlobalData*, short);
    100         JSValue(JSGlobalData*, unsigned short);
    101106        JSValue(JSGlobalData*, int);
    102107        JSValue(JSGlobalData*, unsigned);
    103         JSValue(JSGlobalData*, long);
    104         JSValue(JSGlobalData*, unsigned long);
    105         JSValue(JSGlobalData*, long long);
    106         JSValue(JSGlobalData*, unsigned long long);
    107108
    108109        operator bool() const;
    109         bool operator==(const JSValue other) const;
    110         bool operator!=(const JSValue other) const;
     110        bool operator==(const JSValue& other) const;
     111        bool operator!=(const JSValue& other) const;
     112
     113        bool isInt32() const;
     114        bool isUInt32() const;
     115        bool isDouble() const;
     116        bool isTrue() const;
     117        bool isFalse() const;
     118
     119        int32_t asInt32() const;
     120        uint32_t asUInt32() const;
     121        double asDouble() const;
    111122
    112123        // Querying the type.
     
    135146        // Extracting integer values.
    136147        bool getUInt32(uint32_t&) const;
    137         bool getTruncatedInt32(int32_t&) const;
    138         bool getTruncatedUInt32(uint32_t&) const;
    139148       
    140149        // Basic conversions.
     
    152161
    153162        // Integer conversions.
    154         // 'x.numberToInt32(output)' is equivalent to 'x.isNumber() && x.toInt32(output)'
    155163        double toInteger(ExecState*) const;
    156164        double toIntegerPreserveNaN(ExecState*) const;
    157165        int32_t toInt32(ExecState*) const;
    158166        int32_t toInt32(ExecState*, bool& ok) const;
    159         bool numberToInt32(int32_t& arg);
    160167        uint32_t toUInt32(ExecState*) const;
    161168        uint32_t toUInt32(ExecState*, bool& ok) const;
    162         bool numberToUInt32(uint32_t& arg);
    163 
    164         // Fast integer operations; these values return results where the value is trivially available
    165         // in a convenient form, for use in optimizations.  No assumptions should be made based on the
    166         // results of these operations, for example !isInt32Fast() does not necessarily indicate the
    167         // result of getNumber will not be 0.
    168         bool isInt32Fast() const;
    169         int32_t getInt32Fast() const;
    170         bool isUInt32Fast() const;
    171         uint32_t getUInt32Fast() const;
    172         static JSValue makeInt32Fast(int32_t);
    173         static bool areBothInt32Fast(JSValue, JSValue);
    174169
    175170        // Floating point conversions (this is a convenience method for webcore;
    176171        // signle precision float is not a representation used in JS or JSC).
    177172        float toFloat(ExecState* exec) const { return static_cast<float>(toNumber(exec)); }
    178 
    179         // API Mangled Numbers
    180         bool isAPIMangledNumber();
    181173
    182174        // Garbage collection.
     
    209201        JSCell* asCell() const;
    210202
     203#ifndef NDEBUG
     204        char* description();
     205#endif
     206
    211207    private:
    212208        enum HashTableDeletedValueTag { HashTableDeletedValue };
     
    214210
    215211        inline const JSValue asValue() const { return *this; }
    216 
    217         bool isDoubleNumber() const;
    218         double getDoubleNumber() const;
    219 
     212        JSObject* toObjectSlowCase(ExecState*) const;
     213        JSObject* toThisObjectSlowCase(ExecState*) const;
     214
     215        enum { Int32Tag =        0xffffffff };
     216        enum { CellTag =         0xfffffffe };
     217        enum { TrueTag =         0xfffffffd };
     218        enum { FalseTag =        0xfffffffc };
     219        enum { NullTag =         0xfffffffb };
     220        enum { UndefinedTag =    0xfffffffa };
     221        enum { DeletedValueTag = 0xfffffff9 };
     222
     223        enum { LowestTag =  DeletedValueTag };
     224
     225        uint32_t tag() const;
     226        int32_t payload() const;
     227
     228        JSObject* synthesizePrototype(ExecState*) const;
     229        JSObject* synthesizeObject(ExecState*) const;
     230
     231#if USE(JSVALUE32_64)
     232        union {
     233            EncodedJSValue asEncodedJSValue;
     234            double asDouble;
     235#if PLATFORM(BIG_ENDIAN)
     236            struct {
     237                int32_t tag;
     238                int32_t payload;
     239            } asBits;
     240#else
     241            struct {
     242                int32_t payload;
     243                int32_t tag;
     244            } asBits;
     245#endif
     246        } u;
     247#else // USE(JSVALUE32_64)
    220248        JSCell* m_ptr;
     249#endif // USE(JSVALUE32_64)
    221250    };
    222251
    223     struct JSValueHashTraits : HashTraits<EncodedJSValue> {
     252#if USE(JSVALUE32_64)
     253    typedef IntHash<EncodedJSValue> EncodedJSValueHash;
     254
     255    struct EncodedJSValueHashTraits : HashTraits<EncodedJSValue> {
     256        static const bool emptyValueIsZero = false;
     257        static EncodedJSValue emptyValue() { return JSValue::encode(JSValue()); }
    224258        static void constructDeletedValue(EncodedJSValue& slot) { slot = JSValue::encode(JSValue(JSValue::HashTableDeletedValue)); }
    225259        static bool isDeletedValue(EncodedJSValue value) { return value == JSValue::encode(JSValue(JSValue::HashTableDeletedValue)); }
    226260    };
     261#else
     262    typedef PtrHash<EncodedJSValue> EncodedJSValueHash;
     263
     264    struct EncodedJSValueHashTraits : HashTraits<EncodedJSValue> {
     265        static void constructDeletedValue(EncodedJSValue& slot) { slot = JSValue::encode(JSValue(JSValue::HashTableDeletedValue)); }
     266        static bool isDeletedValue(EncodedJSValue value) { return value == JSValue::encode(JSValue(JSValue::HashTableDeletedValue)); }
     267    };
     268#endif
    227269
    228270    // Stand-alone helper functions.
     
    302344    }
    303345
    304     ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, char i)
     346    ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, int i)
    305347    {
    306348        return JSValue(globalData, i);
    307349    }
    308350
    309     ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, unsigned char i)
    310     {
    311         return JSValue(globalData, i);
    312     }
    313 
    314     ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, short i)
    315     {
    316         return JSValue(globalData, i);
    317     }
    318 
    319     ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, unsigned short i)
    320     {
    321         return JSValue(globalData, i);
    322     }
    323 
    324     ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, int i)
    325     {
    326         return JSValue(globalData, i);
    327     }
    328 
    329351    ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, unsigned i)
    330     {
    331         return JSValue(globalData, i);
    332     }
    333 
    334     ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, long i)
    335     {
    336         return JSValue(globalData, i);
    337     }
    338 
    339     ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, unsigned long i)
    340     {
    341         return JSValue(globalData, i);
    342     }
    343 
    344     ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, long long i)
    345     {
    346         return JSValue(globalData, i);
    347     }
    348 
    349     ALWAYS_INLINE JSValue jsNumber(JSGlobalData* globalData, unsigned long long i)
    350352    {
    351353        return JSValue(globalData, i);
     
    358360    inline bool operator!=(const JSCell* a, const JSValue b) { return JSValue(a) != b; }
    359361
     362    inline int32_t toInt32(double val)
     363    {
     364        if (!(val >= -2147483648.0 && val < 2147483648.0)) {
     365            bool ignored;
     366            return toInt32SlowCase(val, ignored);
     367        }
     368        return static_cast<int32_t>(val);
     369    }
     370
     371    inline uint32_t toUInt32(double val)
     372    {
     373        if (!(val >= 0.0 && val < 4294967296.0)) {
     374            bool ignored;
     375            return toUInt32SlowCase(val, ignored);
     376        }
     377        return static_cast<uint32_t>(val);
     378    }
     379
     380    ALWAYS_INLINE int32_t JSValue::toInt32(ExecState* exec) const
     381    {
     382        if (isInt32())
     383            return asInt32();
     384        bool ignored;
     385        return toInt32SlowCase(toNumber(exec), ignored);
     386    }
     387
     388    inline uint32_t JSValue::toUInt32(ExecState* exec) const
     389    {
     390        if (isUInt32())
     391            return asInt32();
     392        bool ignored;
     393        return toUInt32SlowCase(toNumber(exec), ignored);
     394    }
     395
     396    inline int32_t JSValue::toInt32(ExecState* exec, bool& ok) const
     397    {
     398        if (isInt32()) {
     399            ok = true;
     400            return asInt32();
     401        }
     402        return toInt32SlowCase(toNumber(exec), ok);
     403    }
     404
     405    inline uint32_t JSValue::toUInt32(ExecState* exec, bool& ok) const
     406    {
     407        if (isUInt32()) {
     408            ok = true;
     409            return asInt32();
     410        }
     411        return toUInt32SlowCase(toNumber(exec), ok);
     412    }
     413
     414#if USE(JSVALUE32_64)
     415    inline JSValue jsNaN(ExecState* exec)
     416    {
     417        return JSValue(exec, nonInlineNaN());
     418    }
     419
    360420    // JSValue member functions.
    361421    inline EncodedJSValue JSValue::encode(JSValue value)
    362422    {
     423        return value.u.asEncodedJSValue;
     424    }
     425
     426    inline JSValue JSValue::decode(EncodedJSValue encodedJSValue)
     427    {
     428        JSValue v;
     429        v.u.asEncodedJSValue = encodedJSValue;
     430        return v;
     431    }
     432
     433    inline JSValue::JSValue()
     434    {
     435        u.asBits.tag = CellTag;
     436        u.asBits.payload = 0;
     437    }
     438
     439    inline JSValue::JSValue(JSNullTag)
     440    {
     441        u.asBits.tag = NullTag;
     442        u.asBits.payload = 0;
     443    }
     444   
     445    inline JSValue::JSValue(JSUndefinedTag)
     446    {
     447        u.asBits.tag = UndefinedTag;
     448        u.asBits.payload = 0;
     449    }
     450   
     451    inline JSValue::JSValue(JSTrueTag)
     452    {
     453        u.asBits.tag = TrueTag;
     454        u.asBits.payload = 0;
     455    }
     456   
     457    inline JSValue::JSValue(JSFalseTag)
     458    {
     459        u.asBits.tag = FalseTag;
     460        u.asBits.payload = 0;
     461    }
     462
     463    inline JSValue::JSValue(HashTableDeletedValueTag)
     464    {
     465        u.asBits.tag = DeletedValueTag;
     466        u.asBits.payload = 0;
     467    }
     468
     469    inline JSValue::JSValue(JSCell* ptr)
     470    {
     471        u.asBits.tag = CellTag;
     472        u.asBits.payload = reinterpret_cast<int32_t>(ptr);
     473    }
     474
     475    inline JSValue::JSValue(const JSCell* ptr)
     476    {
     477        u.asBits.tag = CellTag;
     478        u.asBits.payload = reinterpret_cast<int32_t>(const_cast<JSCell*>(ptr));
     479    }
     480
     481    inline JSValue::operator bool() const
     482    {
     483        return u.asBits.payload || tag() != CellTag;
     484    }
     485
     486    inline bool JSValue::operator==(const JSValue& other) const
     487    {
     488        return u.asEncodedJSValue == other.u.asEncodedJSValue;
     489    }
     490
     491    inline bool JSValue::operator!=(const JSValue& other) const
     492    {
     493        return u.asEncodedJSValue != other.u.asEncodedJSValue;
     494    }
     495
     496    inline bool JSValue::isUndefined() const
     497    {
     498        return tag() == UndefinedTag;
     499    }
     500
     501    inline bool JSValue::isNull() const
     502    {
     503        return tag() == NullTag;
     504    }
     505
     506    inline bool JSValue::isUndefinedOrNull() const
     507    {
     508        return isUndefined() || isNull();
     509    }
     510
     511    inline bool JSValue::isCell() const
     512    {
     513        return tag() == CellTag;
     514    }
     515
     516    inline bool JSValue::isInt32() const
     517    {
     518        return tag() == Int32Tag;
     519    }
     520
     521    inline bool JSValue::isUInt32() const
     522    {
     523        return tag() == Int32Tag && asInt32() > -1;
     524    }
     525
     526    inline bool JSValue::isDouble() const
     527    {
     528        return tag() < LowestTag;
     529    }
     530
     531    inline bool JSValue::isTrue() const
     532    {
     533        return tag() == TrueTag;
     534    }
     535
     536    inline bool JSValue::isFalse() const
     537    {
     538        return tag() == FalseTag;
     539    }
     540
     541    inline uint32_t JSValue::tag() const
     542    {
     543        return u.asBits.tag;
     544    }
     545   
     546    inline int32_t JSValue::payload() const
     547    {
     548        return u.asBits.payload;
     549    }
     550   
     551    inline int32_t JSValue::asInt32() const
     552    {
     553        ASSERT(isInt32());
     554        return u.asBits.payload;
     555    }
     556   
     557    inline uint32_t JSValue::asUInt32() const
     558    {
     559        ASSERT(isUInt32());
     560        return u.asBits.payload;
     561    }
     562   
     563    inline double JSValue::asDouble() const
     564    {
     565        ASSERT(isDouble());
     566        return u.asDouble;
     567    }
     568   
     569    ALWAYS_INLINE JSCell* JSValue::asCell() const
     570    {
     571        ASSERT(isCell());
     572        return reinterpret_cast<JSCell*>(u.asBits.payload);
     573    }
     574
     575    inline JSValue::JSValue(ExecState* exec, double d)
     576    {
     577        const int32_t asInt32 = static_cast<int32_t>(d);
     578        if (asInt32 != d || (!asInt32 && signbit(d))) { // true for -0.0
     579            u.asDouble = d;
     580            return;
     581        }
     582        *this = JSValue(exec, static_cast<int32_t>(d));
     583    }
     584
     585    inline JSValue::JSValue(ExecState* exec, char i)
     586    {
     587        *this = JSValue(exec, static_cast<int32_t>(i));
     588    }
     589
     590    inline JSValue::JSValue(ExecState* exec, unsigned char i)
     591    {
     592        *this = JSValue(exec, static_cast<int32_t>(i));
     593    }
     594
     595    inline JSValue::JSValue(ExecState* exec, short i)
     596    {
     597        *this = JSValue(exec, static_cast<int32_t>(i));
     598    }
     599
     600    inline JSValue::JSValue(ExecState* exec, unsigned short i)
     601    {
     602        *this = JSValue(exec, static_cast<int32_t>(i));
     603    }
     604
     605    inline JSValue::JSValue(ExecState*, int i)
     606    {
     607        u.asBits.tag = Int32Tag;
     608        u.asBits.payload = i;
     609    }
     610
     611    inline JSValue::JSValue(ExecState* exec, unsigned i)
     612    {
     613        if (static_cast<int32_t>(i) < 0) {
     614            *this = JSValue(exec, static_cast<double>(i));
     615            return;
     616        }
     617        *this = JSValue(exec, static_cast<int32_t>(i));
     618    }
     619
     620    inline JSValue::JSValue(ExecState* exec, long i)
     621    {
     622        if (static_cast<int32_t>(i) != i) {
     623            *this = JSValue(exec, static_cast<double>(i));
     624            return;
     625        }
     626        *this = JSValue(exec, static_cast<int32_t>(i));
     627    }
     628
     629    inline JSValue::JSValue(ExecState* exec, unsigned long i)
     630    {
     631        if (static_cast<uint32_t>(i) != i) {
     632            *this = JSValue(exec, static_cast<double>(i));
     633            return;
     634        }
     635        *this = JSValue(exec, static_cast<uint32_t>(i));
     636    }
     637
     638    inline JSValue::JSValue(ExecState* exec, long long i)
     639    {
     640        if (static_cast<int32_t>(i) != i) {
     641            *this = JSValue(exec, static_cast<double>(i));
     642            return;
     643        }
     644        *this = JSValue(exec, static_cast<int32_t>(i));
     645    }
     646
     647    inline JSValue::JSValue(ExecState* exec, unsigned long long i)
     648    {
     649        if (static_cast<uint32_t>(i) != i) {
     650            *this = JSValue(exec, static_cast<double>(i));
     651            return;
     652        }
     653        *this = JSValue(exec, static_cast<uint32_t>(i));
     654    }
     655
     656    inline JSValue::JSValue(JSGlobalData* globalData, double d)
     657    {
     658        const int32_t asInt32 = static_cast<int32_t>(d);
     659        if (asInt32 != d || (!asInt32 && signbit(d))) { // true for -0.0
     660            u.asDouble = d;
     661            return;
     662        }
     663        *this = JSValue(globalData, static_cast<int32_t>(d));
     664    }
     665   
     666    inline JSValue::JSValue(JSGlobalData*, int i)
     667    {
     668        u.asBits.tag = Int32Tag;
     669        u.asBits.payload = i;
     670    }
     671   
     672    inline JSValue::JSValue(JSGlobalData* globalData, unsigned i)
     673    {
     674        if (static_cast<int32_t>(i) < 0) {
     675            *this = JSValue(globalData, static_cast<double>(i));
     676            return;
     677        }
     678        *this = JSValue(globalData, static_cast<int32_t>(i));
     679    }
     680
     681    inline bool JSValue::isNumber() const
     682    {
     683        return isInt32() || isDouble();
     684    }
     685
     686    inline bool JSValue::isBoolean() const
     687    {
     688        return isTrue() || isFalse();
     689    }
     690
     691    inline bool JSValue::getBoolean(bool& v) const
     692    {
     693        if (isTrue()) {
     694            v = true;
     695            return true;
     696        }
     697        if (isFalse()) {
     698            v = false;
     699            return true;
     700        }
     701       
     702        return false;
     703    }
     704
     705    inline bool JSValue::getBoolean() const
     706    {
     707        ASSERT(isBoolean());
     708        return tag() == TrueTag;
     709    }
     710
     711    inline double JSValue::uncheckedGetNumber() const
     712    {
     713        ASSERT(isNumber());
     714        return isInt32() ? asInt32() : asDouble();
     715    }
     716
     717    ALWAYS_INLINE JSValue JSValue::toJSNumber(ExecState* exec) const
     718    {
     719        return isNumber() ? asValue() : jsNumber(exec, this->toNumber(exec));
     720    }
     721
     722    inline bool JSValue::getNumber(double& result) const
     723    {
     724        if (isInt32()) {
     725            result = asInt32();
     726            return true;
     727        }
     728        if (isDouble()) {
     729            result = asDouble();
     730            return true;
     731        }
     732        return false;
     733    }
     734
     735#else // USE(JSVALUE32_64)
     736
     737    // JSValue member functions.
     738    inline EncodedJSValue JSValue::encode(JSValue value)
     739    {
    363740        return reinterpret_cast<EncodedJSValue>(value.m_ptr);
    364741    }
     
    369746    }
    370747
     748    inline JSValue JSValue::makeImmediate(intptr_t value)
     749    {
     750        return JSValue(reinterpret_cast<JSCell*>(value));
     751    }
     752
     753    inline intptr_t JSValue::immediateValue()
     754    {
     755        return reinterpret_cast<intptr_t>(m_ptr);
     756    }
     757   
    371758    // 0x0 can never occur naturally because it has a tag of 00, indicating a pointer value, but a payload of 0x0, which is in the (invalid) zero page.
    372759    inline JSValue::JSValue()
     
    396783    }
    397784
    398     inline bool JSValue::operator==(const JSValue other) const
     785    inline bool JSValue::operator==(const JSValue& other) const
    399786    {
    400787        return m_ptr == other.m_ptr;
    401788    }
    402789
    403     inline bool JSValue::operator!=(const JSValue other) const
     790    inline bool JSValue::operator!=(const JSValue& other) const
    404791    {
    405792        return m_ptr != other.m_ptr;
     
    415802        return asValue() == jsNull();
    416803    }
     804#endif // USE(JSVALUE32_64)
    417805
    418806} // namespace JSC
  • trunk/JavaScriptCore/runtime/Operations.h

    r44224 r46598  
    3939    inline bool JSValue::equal(ExecState* exec, JSValue v1, JSValue v2)
    4040    {
    41         if (JSImmediate::areBothImmediateIntegerNumbers(v1, v2))
     41        if (v1.isInt32() && v2.isInt32())
    4242            return v1 == v2;
    4343
     
    4747    ALWAYS_INLINE bool JSValue::equalSlowCaseInline(ExecState* exec, JSValue v1, JSValue v2)
    4848    {
    49         ASSERT(!JSImmediate::areBothImmediateIntegerNumbers(v1, v2));
    50 
    5149        do {
    5250            if (v1.isNumber() && v2.isNumber())
     
    6159                if (v2.isUndefinedOrNull())
    6260                    return true;
    63                 if (JSImmediate::isImmediate(v2))
     61                if (!v2.isCell())
    6462                    return false;
    6563                return v2.asCell()->structure()->typeInfo().masqueradesAsUndefined();
     
    6765
    6866            if (v2.isUndefinedOrNull()) {
    69                 if (JSImmediate::isImmediate(v1))
     67                if (!v1.isCell())
    7068                    return false;
    7169                return v1.asCell()->structure()->typeInfo().masqueradesAsUndefined();
     
    7977                    return false;
    8078                v1 = p1;
    81                 if (JSImmediate::areBothImmediateIntegerNumbers(v1, v2))
     79                if (v1.isInt32() && v2.isInt32())
    8280                    return v1 == v2;
    8381                continue;
     
    8987                    return false;
    9088                v2 = p2;
    91                 if (JSImmediate::areBothImmediateIntegerNumbers(v1, v2))
     89                if (v1.isInt32() && v2.isInt32())
    9290                    return v1 == v2;
    9391                continue;
     
    115113    ALWAYS_INLINE bool JSValue::strictEqualSlowCaseInline(JSValue v1, JSValue v2)
    116114    {
    117         ASSERT(!JSImmediate::isEitherImmediate(v1, v2));
     115        ASSERT(v1.isCell() && v2.isCell());
    118116
    119117        if (v1.asCell()->isString() && v2.asCell()->isString())
     
    125123    inline bool JSValue::strictEqual(JSValue v1, JSValue v2)
    126124    {
    127         if (JSImmediate::areBothImmediateIntegerNumbers(v1, v2))
     125        if (v1.isInt32() && v2.isInt32())
    128126            return v1 == v2;
    129127
     
    131129            return v1.uncheckedGetNumber() == v2.uncheckedGetNumber();
    132130
    133         if (JSImmediate::isEitherImmediate(v1, v2))
     131        if (!v1.isCell() || !v2.isCell())
    134132            return v1 == v2;
    135133
     
    139137    inline bool jsLess(CallFrame* callFrame, JSValue v1, JSValue v2)
    140138    {
    141         if (JSValue::areBothInt32Fast(v1, v2))
    142             return v1.getInt32Fast() < v2.getInt32Fast();
     139        if (v1.isInt32() && v2.isInt32())
     140            return v1.asInt32() < v2.asInt32();
    143141
    144142        double n1;
     
    164162    inline bool jsLessEq(CallFrame* callFrame, JSValue v1, JSValue v2)
    165163    {
    166         if (JSValue::areBothInt32Fast(v1, v2))
    167             return v1.getInt32Fast() <= v2.getInt32Fast();
     164        if (v1.isInt32() && v2.isInt32())
     165            return v1.asInt32() <= v2.asInt32();
    168166
    169167        double n1;
     
    214212
    215213        if (rightIsNumber & leftIsString) {
    216             RefPtr<UString::Rep> value = v2.isInt32Fast() ?
    217                 concatenate(asString(v1)->value().rep(), v2.getInt32Fast()) :
     214            RefPtr<UString::Rep> value = v2.isInt32() ?
     215                concatenate(asString(v1)->value().rep(), v2.asInt32()) :
    218216                concatenate(asString(v1)->value().rep(), right);
    219217
     
    316314            if (LIKELY(v.isString()))
    317315                result.append(asString(v)->value());
    318             else if (v.isInt32Fast())
    319                 result.appendNumeric(v.getInt32Fast());
     316            else if (v.isInt32())
     317                result.appendNumeric(v.asInt32());
    320318            else {
    321319                double d;
  • trunk/JavaScriptCore/runtime/PropertySlot.h

    r44757 r46598  
    2424#include "Identifier.h"
    2525#include "JSValue.h"
    26 #include "JSImmediate.h"
    2726#include "Register.h"
    2827#include <wtf/Assertions.h>
     
    4039    public:
    4140        PropertySlot()
    42             : m_offset(WTF::notFound)
    43         {
    44             clearBase();
     41        {
     42            clearBase();
     43            clearOffset();
    4544            clearValue();
    4645        }
     
    4847        explicit PropertySlot(const JSValue base)
    4948            : m_slotBase(base)
    50             , m_offset(WTF::notFound)
    51         {
     49        {
     50            clearOffset();
    5251            clearValue();
    5352        }
     
    8382        {
    8483            ASSERT(valueSlot);
    85             m_getValue = JSC_VALUE_SLOT_MARKER;
    86             clearBase();
     84            clearBase();
     85            clearOffset();
     86            m_getValue = JSC_VALUE_SLOT_MARKER;
    8787            m_data.valueSlot = valueSlot;
    8888        }
     
    108108        {
    109109            ASSERT(value);
    110             m_getValue = JSC_VALUE_SLOT_MARKER;
    111             clearBase();
     110            clearBase();
     111            clearOffset();
     112            m_getValue = JSC_VALUE_SLOT_MARKER;
    112113            m_value = value;
    113114            m_data.valueSlot = &m_value;
     
    117118        {
    118119            ASSERT(registerSlot);
     120            clearBase();
     121            clearOffset();
    119122            m_getValue = JSC_REGISTER_SLOT_MARKER;
    120             clearBase();
    121123            m_data.registerSlot = registerSlot;
    122124        }
     
    148150        void setUndefined()
    149151        {
    150             clearBase();
    151152            setValue(jsUndefined());
    152153        }
     
    154155        JSValue slotBase() const
    155156        {
    156             ASSERT(m_slotBase);
    157157            return m_slotBase;
    158158        }
     
    177177            m_value = JSValue();
    178178#endif
     179        }
     180
     181        void clearOffset()
     182        {
     183            // Clear offset even in release builds, in case this PropertySlot has been used before.
     184            // (For other data members, we don't need to clear anything because reuse would meaningfully overwrite them.)
     185            m_offset = WTF::notFound;
    179186        }
    180187
  • trunk/JavaScriptCore/runtime/StringPrototype.cpp

    r46180 r46598  
    377377    unsigned len = s.size();
    378378    JSValue a0 = args.at(0);
    379     if (a0.isUInt32Fast()) {
    380         uint32_t i = a0.getUInt32Fast();
     379    if (a0.isUInt32()) {
     380        uint32_t i = a0.asUInt32();
    381381        if (i < len)
    382382            return jsSingleCharacterSubstring(exec, s, i);
     
    394394    unsigned len = s.size();
    395395    JSValue a0 = args.at(0);
    396     if (a0.isUInt32Fast()) {
    397         uint32_t i = a0.getUInt32Fast();
     396    if (a0.isUInt32()) {
     397        uint32_t i = a0.asUInt32();
    398398        if (i < len)
    399399            return jsNumber(exec, s.data()[i]);
     
    427427    if (a1.isUndefined())
    428428        pos = 0;
    429     else if (a1.isUInt32Fast())
    430         pos = min<uint32_t>(a1.getUInt32Fast(), len);
     429    else if (a1.isUInt32())
     430        pos = min<uint32_t>(a1.asUInt32(), len);
    431431    else {
    432432        double dpos = a1.toInteger(exec);
  • trunk/JavaScriptCore/wtf/MainThread.cpp

    r40969 r46598  
    3030#include "MainThread.h"
    3131
     32#include "StdLibExtras.h"
    3233#include "CurrentTime.h"
    3334#include "Deque.h"
    34 #include "StdLibExtras.h"
    3535#include "Threading.h"
    3636
  • trunk/JavaScriptCore/wtf/Platform.h

    r46472 r46598  
    558558#endif
    559559
    560 #if !defined(WTF_USE_ALTERNATE_JSIMMEDIATE) && PLATFORM(X86_64) && PLATFORM(MAC)
    561 #define WTF_USE_ALTERNATE_JSIMMEDIATE 1
    562 #endif
     560#if !defined(WTF_USE_JSVALUE64) && !defined(WTF_USE_JSVALUE32) && !defined(WTF_USE_JSVALUE32_64)
     561#if PLATFORM(X86_64) && PLATFORM(MAC)
     562#define WTF_USE_JSVALUE64 1
     563#else
     564#define WTF_USE_JSVALUE32 1
     565#endif
     566#endif // !defined(WTF_USE_JSVALUE64) && !defined(WTF_USE_JSVALUE32) && !defined(WTF_USE_JSVALUE32_64)
    563567
    564568#if !defined(ENABLE_REPAINT_THROTTLING)
     
    609613#define ENABLE_JIT_OPTIMIZE_PROPERTY_ACCESS 1
    610614#endif
    611 #ifndef ENABLE_JIT_OPTIMIZE_ARITHMETIC
    612 #define ENABLE_JIT_OPTIMIZE_ARITHMETIC 1
    613 #endif
    614615#ifndef ENABLE_JIT_OPTIMIZE_METHOD_CALLS
    615616#define ENABLE_JIT_OPTIMIZE_METHOD_CALLS 1
  • trunk/JavaScriptCore/wtf/StdLibExtras.h

    r43121 r46598  
    4242#endif
    4343
     44// OBJECT_OFFSETOF: Like the C++ offsetof macro, but you can use it with classes.
     45// The magic number 0x4000 is insignificant. We use it to avoid using NULL, since
     46// NULL can cause compiler problems, especially in cases of multiple inheritance.
     47#define OBJECT_OFFSETOF(class, field) (reinterpret_cast<ptrdiff_t>(&(reinterpret_cast<class*>(0x4000)->field)) - 0x4000)
     48
    4449namespace WTF {
    4550
  • trunk/WebCore/ChangeLog

    r46596 r46598  
     1=== End merge of nitro-extreme branch 2009-07-30 ===
     2
     32009-05-11  Geoffrey Garen  <[email protected]>
     4
     5        Reviewed by Sam Weinig.
     6
     7        Make WebCore compile with the new JS number representation.
     8
     9        * ForwardingHeaders/runtime/JSAPIValueWrapper.h: Added.
     10        * ForwardingHeaders/runtime/JSNumberCell.h: Removed.
     11        * bindings/js/ScriptEventListener.cpp:
     12        * bindings/scripts/CodeGeneratorJS.pm:
     13        * bridge/c/c_instance.cpp:
     14
     15=== Start merge of nitro-extreme branch 2009-07-30 ===
     16
    1172009-07-30  Dean McNamee  <[email protected]>
    218
Note: See TracChangeset for help on using the changeset viewer.