Title: [192856] trunk/Source/JavaScriptCore
Revision
192856
Author
[email protected]
Date
2015-11-30 20:43:28 -0800 (Mon, 30 Nov 2015)

Log Message

FTL lazy slow paths should work with B3
https://bugs.webkit.org/show_bug.cgi?id=151667

Reviewed by Geoffrey Garen.

This adds all of the glue necessary to make FTL::LazySlowPath work with B3. The B3 approach
allows us to put all of the code in FTL::LowerDFGToLLVM, instead of having supporting data
structures on the side and a bunch of complex code in FTLCompile.cpp.

* b3/B3CheckSpecial.cpp:
(JSC::B3::CheckSpecial::generate):
* b3/B3LowerToAir.cpp:
(JSC::B3::Air::LowerToAir::run):
* b3/B3PatchpointSpecial.cpp:
(JSC::B3::PatchpointSpecial::generate):
* b3/B3StackmapValue.h:
* ftl/FTLJSTailCall.cpp:
(JSC::FTL::DFG::recoveryFor):
(JSC::FTL::JSTailCall::emit):
* ftl/FTLLazySlowPath.cpp:
(JSC::FTL::LazySlowPath::LazySlowPath):
(JSC::FTL::LazySlowPath::generate):
* ftl/FTLLazySlowPath.h:
(JSC::FTL::LazySlowPath::createGenerator):
(JSC::FTL::LazySlowPath::patchableJump):
(JSC::FTL::LazySlowPath::done):
(JSC::FTL::LazySlowPath::patchpoint):
(JSC::FTL::LazySlowPath::usedRegisters):
(JSC::FTL::LazySlowPath::callSiteIndex):
(JSC::FTL::LazySlowPath::stub):
* ftl/FTLLocation.cpp:
(JSC::FTL::Location::forValueRep):
(JSC::FTL::Location::forStackmaps):
(JSC::FTL::Location::dump):
(JSC::FTL::Location::isGPR):
(JSC::FTL::Location::gpr):
(JSC::FTL::Location::isFPR):
(JSC::FTL::Location::fpr):
(JSC::FTL::Location::restoreInto):
* ftl/FTLLocation.h:
(JSC::FTL::Location::Location):
(JSC::FTL::Location::forRegister):
(JSC::FTL::Location::forIndirect):
(JSC::FTL::Location::forConstant):
(JSC::FTL::Location::kind):
(JSC::FTL::Location::hasReg):
(JSC::FTL::Location::reg):
(JSC::FTL::Location::hasOffset):
(JSC::FTL::Location::offset):
(JSC::FTL::Location::hash):
(JSC::FTL::Location::hasDwarfRegNum): Deleted.
(JSC::FTL::Location::dwarfRegNum): Deleted.
(JSC::FTL::Location::hasDwarfReg): Deleted.
(JSC::FTL::Location::dwarfReg): Deleted.
* ftl/FTLLowerDFGToLLVM.cpp:
(JSC::FTL::DFG::LowerDFGToLLVM::LowerDFGToLLVM):
(JSC::FTL::DFG::LowerDFGToLLVM::lazySlowPath):
* jit/RegisterSet.cpp:
(JSC::RegisterSet::stubUnavailableRegisters):
(JSC::RegisterSet::macroScratchRegisters):
(JSC::RegisterSet::calleeSaveRegisters):
* jit/RegisterSet.h:

Modified Paths

Diff

Modified: trunk/Source/JavaScriptCore/ChangeLog (192855 => 192856)


--- trunk/Source/JavaScriptCore/ChangeLog	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/ChangeLog	2015-12-01 04:43:28 UTC (rev 192856)
@@ -1,3 +1,68 @@
+2015-11-30  Filip Pizlo  <[email protected]>
+
+        FTL lazy slow paths should work with B3
+        https://bugs.webkit.org/show_bug.cgi?id=151667
+
+        Reviewed by Geoffrey Garen.
+
+        This adds all of the glue necessary to make FTL::LazySlowPath work with B3. The B3 approach
+        allows us to put all of the code in FTL::LowerDFGToLLVM, instead of having supporting data
+        structures on the side and a bunch of complex code in FTLCompile.cpp.
+
+        * b3/B3CheckSpecial.cpp:
+        (JSC::B3::CheckSpecial::generate):
+        * b3/B3LowerToAir.cpp:
+        (JSC::B3::Air::LowerToAir::run):
+        * b3/B3PatchpointSpecial.cpp:
+        (JSC::B3::PatchpointSpecial::generate):
+        * b3/B3StackmapValue.h:
+        * ftl/FTLJSTailCall.cpp:
+        (JSC::FTL::DFG::recoveryFor):
+        (JSC::FTL::JSTailCall::emit):
+        * ftl/FTLLazySlowPath.cpp:
+        (JSC::FTL::LazySlowPath::LazySlowPath):
+        (JSC::FTL::LazySlowPath::generate):
+        * ftl/FTLLazySlowPath.h:
+        (JSC::FTL::LazySlowPath::createGenerator):
+        (JSC::FTL::LazySlowPath::patchableJump):
+        (JSC::FTL::LazySlowPath::done):
+        (JSC::FTL::LazySlowPath::patchpoint):
+        (JSC::FTL::LazySlowPath::usedRegisters):
+        (JSC::FTL::LazySlowPath::callSiteIndex):
+        (JSC::FTL::LazySlowPath::stub):
+        * ftl/FTLLocation.cpp:
+        (JSC::FTL::Location::forValueRep):
+        (JSC::FTL::Location::forStackmaps):
+        (JSC::FTL::Location::dump):
+        (JSC::FTL::Location::isGPR):
+        (JSC::FTL::Location::gpr):
+        (JSC::FTL::Location::isFPR):
+        (JSC::FTL::Location::fpr):
+        (JSC::FTL::Location::restoreInto):
+        * ftl/FTLLocation.h:
+        (JSC::FTL::Location::Location):
+        (JSC::FTL::Location::forRegister):
+        (JSC::FTL::Location::forIndirect):
+        (JSC::FTL::Location::forConstant):
+        (JSC::FTL::Location::kind):
+        (JSC::FTL::Location::hasReg):
+        (JSC::FTL::Location::reg):
+        (JSC::FTL::Location::hasOffset):
+        (JSC::FTL::Location::offset):
+        (JSC::FTL::Location::hash):
+        (JSC::FTL::Location::hasDwarfRegNum): Deleted.
+        (JSC::FTL::Location::dwarfRegNum): Deleted.
+        (JSC::FTL::Location::hasDwarfReg): Deleted.
+        (JSC::FTL::Location::dwarfReg): Deleted.
+        * ftl/FTLLowerDFGToLLVM.cpp:
+        (JSC::FTL::DFG::LowerDFGToLLVM::LowerDFGToLLVM):
+        (JSC::FTL::DFG::LowerDFGToLLVM::lazySlowPath):
+        * jit/RegisterSet.cpp:
+        (JSC::RegisterSet::stubUnavailableRegisters):
+        (JSC::RegisterSet::macroScratchRegisters):
+        (JSC::RegisterSet::calleeSaveRegisters):
+        * jit/RegisterSet.h:
+
 2015-11-30  Geoffrey Garen  <[email protected]>
 
         Use a better RNG for Math.random()

Modified: trunk/Source/JavaScriptCore/b3/B3CheckSpecial.cpp (192855 => 192856)


--- trunk/Source/JavaScriptCore/b3/B3CheckSpecial.cpp	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/b3/B3CheckSpecial.cpp	2015-12-01 04:43:28 UTC (rev 192856)
@@ -212,6 +212,7 @@
                 params.value = value;
                 params.reps = reps;
                 params.usedRegisters = value->m_usedRegisters;
+                params.context = &context;
 
                 value->m_generator->run(jit, params);
             }));

Modified: trunk/Source/JavaScriptCore/b3/B3LowerToAir.cpp (192855 => 192856)


--- trunk/Source/JavaScriptCore/b3/B3LowerToAir.cpp	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/b3/B3LowerToAir.cpp	2015-12-01 04:43:28 UTC (rev 192856)
@@ -119,8 +119,10 @@
 
             // Make sure that the successors are set up correctly.
             ASSERT(block->successors().size() <= 2);
-            for (B3::BasicBlock* successor : block->successorBlocks())
-                m_blockToBlock[block]->successors().append(m_blockToBlock[successor]);
+            for (B3::FrequentedBlock successor : block->successors()) {
+                m_blockToBlock[block]->successors().append(
+                    Air::FrequentedBlock(m_blockToBlock[successor.block()], successor.frequency()));
+            }
         }
 
         Air::InsertionSet insertionSet(m_code);

Modified: trunk/Source/JavaScriptCore/b3/B3PatchpointSpecial.cpp (192855 => 192856)


--- trunk/Source/JavaScriptCore/b3/B3PatchpointSpecial.cpp	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/b3/B3PatchpointSpecial.cpp	2015-12-01 04:43:28 UTC (rev 192856)
@@ -99,6 +99,7 @@
     params.value = value;
     params.reps = reps;
     params.usedRegisters = value->m_usedRegisters;
+    params.context = &context;
 
     value->m_generator->run(jit, params);
 

Modified: trunk/Source/JavaScriptCore/b3/B3StackmapValue.h (192855 => 192856)


--- trunk/Source/JavaScriptCore/b3/B3StackmapValue.h	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/b3/B3StackmapValue.h	2015-12-01 04:43:28 UTC (rev 192856)
@@ -39,6 +39,10 @@
 
 class StackmapValue;
 
+namespace Air {
+struct GenerationContext;
+}
+
 struct StackmapGenerationParams {
     // This is the stackmap value that we're generating.
     StackmapValue* value;
@@ -49,6 +53,9 @@
     
     // This tells you the registers that were used.
     RegisterSet usedRegisters;
+
+    // The Air::GenerationContext gives you even more power.
+    Air::GenerationContext* context;
 };
 
 typedef void StackmapGeneratorFunction(CCallHelpers&, const StackmapGenerationParams&);

Modified: trunk/Source/JavaScriptCore/ftl/FTLJSTailCall.cpp (192855 => 192856)


--- trunk/Source/JavaScriptCore/ftl/FTLJSTailCall.cpp	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/ftl/FTLJSTailCall.cpp	2015-12-01 04:43:28 UTC (rev 192856)
@@ -71,11 +71,11 @@
         switch (location.kind()) {
         case Location::Register:
             // We handle the addend outside
-            return ValueRecovery::inRegister(location.dwarfReg().reg(), format);
+            return ValueRecovery::inRegister(location.reg(), format);
 
         case Location::Indirect:
             // Oh LLVM, you crazy...
-            RELEASE_ASSERT(location.dwarfReg().reg() == Reg(MacroAssembler::framePointerRegister));
+            RELEASE_ASSERT(location.reg() == Reg(MacroAssembler::framePointerRegister));
             RELEASE_ASSERT(!(location.offset() % sizeof(void*)));
             // DataFormatInt32 and DataFormatBoolean should be already be boxed.
             RELEASE_ASSERT(format != DataFormatInt32 && format != DataFormatBoolean);
@@ -263,7 +263,7 @@
         shuffleData.args[i] = recoveryFor(m_arguments[i], *record, jitCode.stackmaps);
         if (FTL::Location addend = getRegisterWithAddend(m_arguments[i], *record, jitCode.stackmaps)) {
             withAddend.add(
-                addend.dwarfReg().reg(),
+                addend.reg(),
                 Vector<std::pair<ValueRecovery*, int32_t>>()).iterator->value.append(
                     std::make_pair(&shuffleData.args[i], addend.addend()));
             numAddends++;

Modified: trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp (192855 => 192856)


--- trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp	2015-12-01 04:43:28 UTC (rev 192856)
@@ -34,16 +34,31 @@
 namespace JSC { namespace FTL {
 
 LazySlowPath::LazySlowPath(
-    CodeLocationLabel patchpoint, CodeLocationLabel exceptionTarget,
-    const RegisterSet& usedRegisters, CallSiteIndex callSiteIndex, RefPtr<Generator> generator,
-    GPRReg newZeroReg, ScratchRegisterAllocator scratchRegisterAllocator)
+#if FTL_USES_B3
+    CodeLocationJump patchableJump, CodeLocationLabel done,
+#else // FTL_USES_B3
+    CodeLocationLabel patchpoint,
+#endif // FTL_USES_B3
+    CodeLocationLabel exceptionTarget,
+    const RegisterSet& usedRegisters, CallSiteIndex callSiteIndex, RefPtr<Generator> generator
+#if !FTL_USES_B3
+    , GPRReg newZeroReg, ScratchRegisterAllocator scratchRegisterAllocator
+#endif // !FTL_USES_B3
+    )
+#if FTL_USES_B3
+    : m_patchableJump(patchableJump)
+    , m_done(done)
+#else // FTL_USES_B3
     : m_patchpoint(patchpoint)
+#endif // FTL_USES_B3
     , m_exceptionTarget(exceptionTarget)
     , m_usedRegisters(usedRegisters)
     , m_callSiteIndex(callSiteIndex)
     , m_generator(generator)
+#if !FTL_USES_B3
     , m_newZeroValueRegister(newZeroReg)
     , m_scratchRegisterAllocator(scratchRegisterAllocator)
+#endif // !FTL_USES_B3
 {
 }
 
@@ -63,6 +78,7 @@
     params.exceptionJumps = m_exceptionTarget ? &exceptionJumps : nullptr;
     params.lazySlowPath = this;
 
+#if !FTL_USES_B3
     unsigned bytesSaved = m_scratchRegisterAllocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
     // This is needed because LLVM may create a stackmap location that is the register SP.
     // But on arm64, SP is also the same register number as ZR, so LLVM is telling us that it has
@@ -71,9 +87,11 @@
     // into a non-SP register.
     if (m_newZeroValueRegister != InvalidGPRReg)
         jit.move(CCallHelpers::TrustedImm32(0), m_newZeroValueRegister);
+#endif // !FTL_USES_B3
 
     m_generator->run(jit, params);
 
+#if !FTL_USES_B3
     CCallHelpers::Label doneLabel;
     CCallHelpers::Jump jumpToEndOfPatchpoint;
     if (bytesSaved) {
@@ -81,18 +99,27 @@
         m_scratchRegisterAllocator.restoreReusedRegistersByPopping(jit, bytesSaved, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
         jumpToEndOfPatchpoint = jit.jump();
     }
+#endif // !FTL_USES_B3
 
     LinkBuffer linkBuffer(vm, jit, codeBlock, JITCompilationMustSucceed);
+#if FTL_USES_B3
+    linkBuffer.link(params.doneJumps, m_done);
+#else // FTL_USES_B3
     if (bytesSaved) {
         linkBuffer.link(params.doneJumps, linkBuffer.locationOf(doneLabel));
         linkBuffer.link(jumpToEndOfPatchpoint, m_patchpoint.labelAtOffset(MacroAssembler::maxJumpReplacementSize()));
     } else
         linkBuffer.link(params.doneJumps, m_patchpoint.labelAtOffset(MacroAssembler::maxJumpReplacementSize()));
+#endif // FTL_USES_B3
     if (m_exceptionTarget)
         linkBuffer.link(exceptionJumps, m_exceptionTarget);
     m_stub = FINALIZE_CODE_FOR(codeBlock, linkBuffer, ("Lazy slow path call stub"));
 
+#if FTL_USES_B3
+    MacroAssembler::repatchJump(m_patchableJump, CodeLocationLabel(m_stub.code()));
+#else // FTL_USES_B3
     MacroAssembler::replaceWithJump(m_patchpoint, CodeLocationLabel(m_stub.code()));
+#endif // FTL_USES_B3
 }
 
 } } // namespace JSC::FTL

Modified: trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.h (192855 => 192856)


--- trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.h	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.h	2015-12-01 04:43:28 UTC (rev 192856)
@@ -66,12 +66,26 @@
     }
     
     LazySlowPath(
-        CodeLocationLabel patchpoint, CodeLocationLabel exceptionTarget, const RegisterSet& usedRegisters,
-        CallSiteIndex, RefPtr<Generator>, GPRReg newZeroReg, ScratchRegisterAllocator);
+#if FTL_USES_B3
+        CodeLocationJump patchableJump, CodeLocationLabel done,
+#else // FTL_USES_B3
+        CodeLocationLabel patchpoint,
+#endif // FTL_USES_B3
+        CodeLocationLabel exceptionTarget, const RegisterSet& usedRegisters,
+        CallSiteIndex, RefPtr<Generator>
+#if !FTL_USES_B3
+        , GPRReg newZeroReg, ScratchRegisterAllocator
+#endif // !FTL_USES_B3
+        );
 
     ~LazySlowPath();
 
+#if FTL_USES_B3
+    CodeLocationJump patchableJump() const { return m_patchableJump; }
+    CodeLocationLabel done() const { return m_done; }
+#else // FTL_USES_B3
     CodeLocationLabel patchpoint() const { return m_patchpoint; }
+#endif // FTL_USES_B3
     const RegisterSet& usedRegisters() const { return m_usedRegisters; }
     CallSiteIndex callSiteIndex() const { return m_callSiteIndex; }
 
@@ -80,14 +94,21 @@
     MacroAssemblerCodeRef stub() const { return m_stub; }
 
 private:
+#if FTL_USES_B3
+    CodeLocationJump m_patchableJump;
+    CodeLocationLabel m_done;
+#else // FTL_USES_B3
     CodeLocationLabel m_patchpoint;
+#endif // FTL_USES_B3
     CodeLocationLabel m_exceptionTarget;
     RegisterSet m_usedRegisters;
     CallSiteIndex m_callSiteIndex;
     MacroAssemblerCodeRef m_stub;
     RefPtr<Generator> m_generator;
+#if !FTL_USES_B3
     GPRReg m_newZeroValueRegister;
     ScratchRegisterAllocator m_scratchRegisterAllocator;
+#endif // !FTL_USES_B3
 };
 
 } } // namespace JSC::FTL

Modified: trunk/Source/JavaScriptCore/ftl/FTLLocation.cpp (192855 => 192856)


--- trunk/Source/JavaScriptCore/ftl/FTLLocation.cpp	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/ftl/FTLLocation.cpp	2015-12-01 04:43:28 UTC (rev 192856)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -28,6 +28,7 @@
 
 #if ENABLE(FTL_JIT)
 
+#include "B3ValueRep.h"
 #include "FTLSaveRestore.h"
 #include "RegisterSet.h"
 #include <wtf/CommaPrinter.h>
@@ -36,6 +37,25 @@
 
 namespace JSC { namespace FTL {
 
+using namespace B3;
+
+#if FTL_USES_B3
+Location Location::forValueRep(const ValueRep& rep)
+{
+    switch (rep.kind()) {
+    case ValueRep::Register:
+        return forRegister(rep.reg(), 0);
+    case ValueRep::Stack:
+        return forIndirect(GPRInfo::callFrameRegister, rep.offsetFromFP());
+    case ValueRep::Constant:
+        return forConstant(rep.value());
+    default:
+        RELEASE_ASSERT_NOT_REACHED();
+        return Location();
+    }
+}
+#endif // FTL_USES_B3
+
 Location Location::forStackmaps(const StackMaps* stackmaps, const StackMaps::Location& location)
 {
     switch (location.kind) {
@@ -66,8 +86,8 @@
 void Location::dump(PrintStream& out) const
 {
     out.print("(", kind());
-    if (hasDwarfReg())
-        out.print(", ", dwarfReg());
+    if (hasReg())
+        out.print(", ", reg());
     if (hasOffset())
         out.print(", ", offset());
     if (hasAddend())
@@ -84,22 +104,22 @@
 
 bool Location::isGPR() const
 {
-    return kind() == Register && dwarfReg().reg().isGPR();
+    return kind() == Register && reg().isGPR();
 }
 
 GPRReg Location::gpr() const
 {
-    return dwarfReg().reg().gpr();
+    return reg().gpr();
 }
 
 bool Location::isFPR() const
 {
-    return kind() == Register && dwarfReg().reg().isFPR();
+    return kind() == Register && reg().isFPR();
 }
 
 FPRReg Location::fpr() const
 {
-    return dwarfReg().reg().fpr();
+    return reg().fpr();
 }
 
 void Location::restoreInto(MacroAssembler& jit, char* savedRegisters, GPRReg result, unsigned numFramesToPop) const

Modified: trunk/Source/JavaScriptCore/ftl/FTLLocation.h (192855 => 192856)


--- trunk/Source/JavaScriptCore/ftl/FTLLocation.h	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/ftl/FTLLocation.h	2015-12-01 04:43:28 UTC (rev 192856)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -28,14 +28,22 @@
 
 #if ENABLE(FTL_JIT)
 
+#include "DFGCommon.h"
 #include "FPRInfo.h"
 #include "FTLDWARFRegister.h"
 #include "FTLStackMaps.h"
 #include "GPRInfo.h"
+#include "Reg.h"
 #include <wtf/HashMap.h>
 
-namespace JSC { namespace FTL {
+namespace JSC {
 
+namespace B3 {
+class ValueRep;
+} // namespace B3
+
+namespace FTL {
+
 class Location {
 public:
     enum Kind {
@@ -56,25 +64,35 @@
     {
         u.constant = 1;
     }
-    
-    static Location forRegister(DWARFRegister dwarfReg, int32_t addend)
+
+    static Location forRegister(Reg reg, int32_t addend)
     {
         Location result;
         result.m_kind = Register;
-        result.u.variable.dwarfRegNum = dwarfReg.dwarfRegNum();
+        result.u.variable.regIndex = reg.index();
         result.u.variable.offset = addend;
         return result;
     }
     
-    static Location forIndirect(DWARFRegister dwarfReg, int32_t offset)
+    static Location forRegister(DWARFRegister dwarfReg, int32_t addend)
     {
+        return forRegister(dwarfReg.reg(), addend);
+    }
+    
+    static Location forIndirect(Reg reg, int32_t offset)
+    {
         Location result;
         result.m_kind = Indirect;
-        result.u.variable.dwarfRegNum = dwarfReg.dwarfRegNum();
+        result.u.variable.regIndex = reg.index();
         result.u.variable.offset = offset;
         return result;
     }
     
+    static Location forIndirect(DWARFRegister dwarfReg, int32_t offset)
+    {
+        return forIndirect(dwarfReg.reg(), offset);
+    }
+    
     static Location forConstant(int64_t constant)
     {
         Location result;
@@ -83,22 +101,23 @@
         return result;
     }
 
+#if FTL_USES_B3
+    static Location forValueRep(const B3::ValueRep&);
+#endif // FTL_USES_B3
+
     // You can pass a null StackMaps if you are confident that the location doesn't
     // involve a wide constant.
     static Location forStackmaps(const StackMaps*, const StackMaps::Location&);
     
     Kind kind() const { return m_kind; }
-    
-    bool hasDwarfRegNum() const { return kind() == Register || kind() == Indirect; }
-    int16_t dwarfRegNum() const
+
+    bool hasReg() const { return kind() == Register || kind() == Indirect; }
+    Reg reg() const
     {
-        ASSERT(hasDwarfRegNum());
-        return u.variable.dwarfRegNum;
+        ASSERT(hasReg());
+        return Reg::fromIndex(u.variable.regIndex);
     }
     
-    bool hasDwarfReg() const { return hasDwarfRegNum(); }
-    DWARFRegister dwarfReg() const { return DWARFRegister(dwarfRegNum()); }
-    
     bool hasOffset() const { return kind() == Indirect; }
     int32_t offset() const
     {
@@ -142,11 +161,11 @@
             break;
 
         case Register:
-            result ^= u.variable.dwarfRegNum;
+            result ^= u.variable.regIndex;
             break;
             
         case Indirect:
-            result ^= u.variable.dwarfRegNum;
+            result ^= u.variable.regIndex;
             result ^= u.variable.offset;
             break;
             
@@ -182,7 +201,7 @@
     union {
         int64_t constant;
         struct {
-            int16_t dwarfRegNum;
+            unsigned regIndex;
             int32_t offset;
         } variable;
     } u;

Modified: trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToLLVM.cpp (192855 => 192856)


--- trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToLLVM.cpp	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToLLVM.cpp	2015-12-01 04:43:28 UTC (rev 192856)
@@ -28,6 +28,8 @@
 
 #if ENABLE(FTL_JIT)
 
+#include "AirGenerationContext.h"
+#include "AllowMacroScratchRegisterUsage.h"
 #include "CodeBlockWithJITType.h"
 #include "DFGAbstractInterpreterInlines.h"
 #include "DFGDominators.h"
@@ -100,6 +102,7 @@
     } while (false)
 
 class LowerDFGToLLVM {
+    WTF_MAKE_NONCOPYABLE(LowerDFGToLLVM);
 public:
     LowerDFGToLLVM(State& state)
         : m_graph(state.graph)
@@ -7835,14 +7838,78 @@
     LValue lazySlowPath(const Functor& functor, const Vector<LValue>& userArguments)
     {
 #if FTL_USES_B3
-        UNUSED_PARAM(functor);
-
+        CodeOrigin origin = m_node->origin.semantic;
+        
         B3::PatchpointValue* result = m_out.patchpoint(B3::Int64);
         for (LValue arg : userArguments)
-            result->append(ConstrainedValue(arg, ValueRep::SomeRegister));
+            result->append(ConstrainedValue(arg, B3::ValueRep::SomeRegister));
+
+        // FIXME: As part of handling exceptions, we need to append OSR exit state here.
+        
+        result->clobber(RegisterSet::macroScratchRegisters());
+        State* state = &m_ftlState;
+
         result->setGenerator(
-            [&] (CCallHelpers& jit, const B3::StackmapGenerationParams&) {
-                jit.oops();
+            [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
+                Vector<Location> locations;
+                for (const B3::ValueRep& rep : params.reps)
+                    locations.append(Location::forValueRep(rep));
+
+                RefPtr<LazySlowPath::Generator> generator = functor(locations);
+                
+                CCallHelpers::PatchableJump patchableJump = jit.patchableJump();
+                CCallHelpers::Label done = jit.label();
+
+                RegisterSet usedRegisters = params.usedRegisters;
+
+                // FIXME: As part of handling exceptions, we need to create a concrete OSRExit here.
+                // Doing so should automagically register late paths that emit exit thunks.
+                
+                params.context->latePaths.append(
+                    createSharedTask<Air::GenerationContext::LatePathFunction>(
+                        [=] (CCallHelpers& jit, Air::GenerationContext&) {
+                            AllowMacroScratchRegisterUsage allowScratch(jit);
+                            patchableJump.m_jump.link(&jit);
+                            unsigned index = state->jitCode->lazySlowPaths.size();
+                            state->jitCode->lazySlowPaths.append(nullptr);
+                            jit.pushToSaveImmediateWithoutTouchingRegisters(
+                                CCallHelpers::TrustedImm32(index));
+                            CCallHelpers::Jump generatorJump = jit.jump();
+
+                            // Note that so long as we're here, we don't really know if our late path
+                            // runs before or after any other late paths that we might depend on, like
+                            // the exception thunk.
+
+                            RefPtr<JITCode> jitCode = state->jitCode;
+                            VM* vm = &state->graph.m_vm;
+
+                            jit.addLinkTask(
+                                [=] (LinkBuffer& linkBuffer) {
+                                    linkBuffer.link(
+                                        generatorJump, CodeLocationLabel(
+                                            vm->getCTIStub(
+                                                lazySlowPathGenerationThunkGenerator).code()));
+                                    
+                                    CodeLocationJump linkedPatchableJump = CodeLocationJump(
+                                        linkBuffer.locationOf(patchableJump));
+                                    CodeLocationLabel linkedDone = linkBuffer.locationOf(done);
+
+                                    // FIXME: Need a story for exceptions in FTL-B3. That basically means
+                                    // doing a lookup of the exception entrypoint here. We will have an
+                                    // OSR exit data structure of some sort.
+                                    // https://bugs.webkit.org/show_bug.cgi?id=151686
+                                    CodeLocationLabel exceptionTarget;
+                                    CallSiteIndex callSiteIndex =
+                                        jitCode->common.addUniqueCallSiteIndex(origin);
+                                    
+                                    std::unique_ptr<LazySlowPath> lazySlowPath =
+                                        std::make_unique<LazySlowPath>(
+                                            linkedPatchableJump, linkedDone, exceptionTarget,
+                                            usedRegisters, callSiteIndex, generator);
+                                    
+                                    jitCode->lazySlowPaths[index] = WTF::move(lazySlowPath);
+                                });
+                        }));
             });
         return result;
 #else

Modified: trunk/Source/JavaScriptCore/jit/RegisterSet.cpp (192855 => 192856)


--- trunk/Source/JavaScriptCore/jit/RegisterSet.cpp	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/jit/RegisterSet.cpp	2015-12-01 04:43:28 UTC (rev 192856)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -80,6 +80,15 @@
     return RegisterSet(specialRegisters(), vmCalleeSaveRegisters());
 }
 
+RegisterSet RegisterSet::macroScratchRegisters()
+{
+#if CPU(X86_64)
+    return RegisterSet(MacroAssembler::s_scratchRegister);
+#else
+    return RegisterSet();
+#endif
+}
+
 RegisterSet RegisterSet::calleeSaveRegisters()
 {
     RegisterSet result;

Modified: trunk/Source/JavaScriptCore/jit/RegisterSet.h (192855 => 192856)


--- trunk/Source/JavaScriptCore/jit/RegisterSet.h	2015-12-01 03:39:59 UTC (rev 192855)
+++ trunk/Source/JavaScriptCore/jit/RegisterSet.h	2015-12-01 04:43:28 UTC (rev 192856)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -59,6 +59,7 @@
 #endif
     static RegisterSet volatileRegistersForJSCall();
     static RegisterSet stubUnavailableRegisters(); // The union of callee saves and special registers.
+    static RegisterSet macroScratchRegisters();
     JS_EXPORT_PRIVATE static RegisterSet allGPRs();
     JS_EXPORT_PRIVATE static RegisterSet allFPRs();
     static RegisterSet allRegisters();
_______________________________________________
webkit-changes mailing list
webkit-changes@lists.webkit.org
https://lists.webkit.org/mailman/listinfo/webkit-changes

Reply via email to