Ignore:
Timestamp:
Dec 12, 2016, 1:46:45 PM (9 years ago)
Author:
[email protected]
Message:

REGRESSION(r209653): speedometer crashes making virtual slow path tailcalls
https://bugs.webkit.org/show_bug.cgi?id=165748

Reviewed by Filip Pizlo.

JSTests:

New regression test.

  • stress/regress-165748.js: Added.

(sum1):
(sum2):
(sum3):
(sum4):
(sum5):
(sum6):
(tailCaller):
(test):

Source/JavaScriptCore:

The virtual slow path for tail calls always passes arguments on the stack.
The fix here is to link to the stack-argument entry point instead of a
register-argument entry point.

While fixing this bug, I found that we weren't clearing the code origin when
shuffling the call frame for a register-argument tail call.

Also rolling back in r209653, r209654, r209663, and r209673.

  • jit/CallFrameShuffler.cpp:

(JSC::CallFrameShuffler::prepareAny):

  • jit/ThunkGenerators.cpp:

(JSC::virtualThunkFor):

Source/WTF:

Rolling back in r209653, r209654, r209663, and r209673.

  • wtf/Platform.h:
File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/Source/JavaScriptCore/jit/JITThunks.cpp

    r209678 r209725  
    4545}
    4646
    47 MacroAssemblerCodePtr JITThunks::ctiNativeCall(VM* vm)
     47JITEntryPointsWithRef JITThunks::jitEntryNativeCall(VM* vm)
    4848{
    49     if (!vm->canUseJIT())
    50         return MacroAssemblerCodePtr::createLLIntCodePtr(llint_native_call_trampoline);
    51     return ctiStub(vm, nativeCallGenerator).code();
     49    if (!vm->canUseJIT()) {
     50        MacroAssemblerCodePtr nativeCallStub = MacroAssemblerCodePtr::createLLIntCodePtr(llint_native_call_trampoline);
     51        return JITEntryPointsWithRef(MacroAssemblerCodeRef::createSelfManagedCodeRef(nativeCallStub), nativeCallStub, nativeCallStub);
     52    }
     53    return jitEntryStub(vm, nativeCallGenerator);
    5254}
    5355
    54 MacroAssemblerCodePtr JITThunks::ctiNativeConstruct(VM* vm)
     56JITEntryPointsWithRef JITThunks::jitEntryNativeConstruct(VM* vm)
    5557{
    56     if (!vm->canUseJIT())
    57         return MacroAssemblerCodePtr::createLLIntCodePtr(llint_native_construct_trampoline);
    58     return ctiStub(vm, nativeConstructGenerator).code();
     58    if (!vm->canUseJIT()) {
     59        MacroAssemblerCodePtr nativeConstructStub = MacroAssemblerCodePtr::createLLIntCodePtr(llint_native_construct_trampoline);
     60        return JITEntryPointsWithRef(MacroAssemblerCodeRef::createSelfManagedCodeRef(nativeConstructStub), nativeConstructStub, nativeConstructStub);
     61    }
     62    return jitEntryStub(vm, nativeConstructGenerator);
    5963}
    6064
     
    8387}
    8488
     89JITEntryPointsWithRef JITThunks::jitEntryStub(VM* vm, JITEntryGenerator generator)
     90{
     91    LockHolder locker(m_lock);
     92    JITEntryStubMap::AddResult entry = m_jitEntryStubMap.add(generator, JITEntryPointsWithRef());
     93    if (entry.isNewEntry) {
     94        // Compilation thread can only retrieve existing entries.
     95        ASSERT(!isCompilationThread());
     96        entry.iterator->value = generator(vm);
     97    }
     98    return entry.iterator->value;
     99}
     100
     101JITJSCallThunkEntryPointsWithRef JITThunks::jitCallThunkEntryStub(VM* vm, JITCallThunkEntryGenerator generator)
     102{
     103    LockHolder locker(m_lock);
     104    JITCallThunkEntryStubMap::AddResult entry = m_jitCallThunkEntryStubMap.add(generator, JITJSCallThunkEntryPointsWithRef());
     105    if (entry.isNewEntry) {
     106        // Compilation thread can only retrieve existing entries.
     107        ASSERT(!isCompilationThread());
     108        entry.iterator->value = generator(vm);
     109    }
     110    return entry.iterator->value;
     111}
     112
    85113void JITThunks::finalize(Handle<Unknown> handle, void*)
    86114{
     
    94122}
    95123
    96 NativeExecutable* JITThunks::hostFunctionStub(VM* vm, NativeFunction function, NativeFunction constructor, ThunkGenerator generator, Intrinsic intrinsic, const DOMJIT::Signature* signature, const String& name)
     124NativeExecutable* JITThunks::hostFunctionStub(VM* vm, NativeFunction function, NativeFunction constructor, JITEntryGenerator generator, Intrinsic intrinsic, const DOMJIT::Signature* signature, const String& name)
    97125{
    98126    ASSERT(!isCompilationThread());   
     
    104132    RefPtr<JITCode> forCall;
    105133    if (generator) {
    106         MacroAssemblerCodeRef entry = generator(vm);
    107         forCall = adoptRef(new DirectJITCode(entry, entry.code(), JITCode::HostCallThunk));
     134        JITEntryPointsWithRef entry = generator(vm);
     135        forCall = adoptRef(new DirectJITCode(entry, JITCode::HostCallThunk));
    108136    } else
    109         forCall = adoptRef(new NativeJITCode(JIT::compileCTINativeCall(vm, function), JITCode::HostCallThunk));
     137        forCall = adoptRef(new DirectJITCode(JIT::compileNativeCallEntryPoints(vm, function), JITCode::HostCallThunk));
    110138   
    111     RefPtr<JITCode> forConstruct = adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeConstruct(vm)), JITCode::HostCallThunk));
     139    RefPtr<JITCode> forConstruct = adoptRef(new DirectJITCode(jitEntryNativeConstruct(vm), JITCode::HostCallThunk));
    112140   
    113141    NativeExecutable* nativeExecutable = NativeExecutable::create(*vm, forCall, function, forConstruct, constructor, intrinsic, signature, name);
     
    116144}
    117145
    118 NativeExecutable* JITThunks::hostFunctionStub(VM* vm, NativeFunction function, ThunkGenerator generator, Intrinsic intrinsic, const String& name)
     146NativeExecutable* JITThunks::hostFunctionStub(VM* vm, NativeFunction function, JITEntryGenerator generator, Intrinsic intrinsic, const String& name)
    119147{
    120148    return hostFunctionStub(vm, function, callHostFunctionAsConstructor, generator, intrinsic, nullptr, name);
Note: See TracChangeset for help on using the changeset viewer.