Title: [189049] branches/jsc-tailcall/Source/_javascript_Core

Diff

Modified: branches/jsc-tailcall/Source/_javascript_Core/_javascript_Core.xcodeproj/project.pbxproj (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/_javascript_Core.xcodeproj/project.pbxproj	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/_javascript_Core.xcodeproj/project.pbxproj	2015-08-27 21:13:37 UTC (rev 189049)
@@ -961,6 +961,8 @@
 		623A37EC1B87A7C000754209 /* RegisterMap.h in Headers */ = {isa = PBXBuildFile; fileRef = 623A37EB1B87A7BD00754209 /* RegisterMap.h */; };
 		627673231B680C1E00FD9F2E /* CallMode.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 627673211B680C1E00FD9F2E /* CallMode.cpp */; };
 		627673241B680C1E00FD9F2E /* CallMode.h in Headers */ = {isa = PBXBuildFile; fileRef = 627673221B680C1E00FD9F2E /* CallMode.h */; settings = {ATTRIBUTES = (Private, ); }; };
+		62774DAA1B8D4B190006F05A /* FTLJSTailCall.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 62774DA81B8D4B190006F05A /* FTLJSTailCall.cpp */; };
+		62774DAB1B8D4B190006F05A /* FTLJSTailCall.h in Headers */ = {isa = PBXBuildFile; fileRef = 62774DA91B8D4B190006F05A /* FTLJSTailCall.h */; };
 		62D2D38F1ADF103F000206C1 /* FunctionRareData.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 62D2D38D1ADF103F000206C1 /* FunctionRareData.cpp */; };
 		62D2D3901ADF103F000206C1 /* FunctionRareData.h in Headers */ = {isa = PBXBuildFile; fileRef = 62D2D38E1ADF103F000206C1 /* FunctionRareData.h */; settings = {ATTRIBUTES = (Private, ); }; };
 		62D755D41B84FB3D001801FA /* CallFrameShuffler64.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 62D755D31B84FB39001801FA /* CallFrameShuffler64.cpp */; };
@@ -2683,6 +2685,8 @@
 		623A37EB1B87A7BD00754209 /* RegisterMap.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RegisterMap.h; sourceTree = "<group>"; };
 		627673211B680C1E00FD9F2E /* CallMode.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = CallMode.cpp; sourceTree = "<group>"; };
 		627673221B680C1E00FD9F2E /* CallMode.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CallMode.h; sourceTree = "<group>"; };
+		62774DA81B8D4B190006F05A /* FTLJSTailCall.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = FTLJSTailCall.cpp; path = ftl/FTLJSTailCall.cpp; sourceTree = "<group>"; };
+		62774DA91B8D4B190006F05A /* FTLJSTailCall.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = FTLJSTailCall.h; path = ftl/FTLJSTailCall.h; sourceTree = "<group>"; };
 		62A9A29E1B0BED4800BD54CA /* DFGLazyNode.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGLazyNode.cpp; path = dfg/DFGLazyNode.cpp; sourceTree = "<group>"; };
 		62A9A29F1B0BED4800BD54CA /* DFGLazyNode.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGLazyNode.h; path = dfg/DFGLazyNode.h; sourceTree = "<group>"; };
 		62D2D38D1ADF103F000206C1 /* FunctionRareData.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = FunctionRareData.cpp; sourceTree = "<group>"; };
@@ -3767,6 +3771,8 @@
 				0FD1202E1A8AED12000F5280 /* FTLJSCallBase.h */,
 				0FD120311A8C85BD000F5280 /* FTLJSCallVarargs.cpp */,
 				0FD120321A8C85BD000F5280 /* FTLJSCallVarargs.h */,
+				62774DA81B8D4B190006F05A /* FTLJSTailCall.cpp */,
+				62774DA91B8D4B190006F05A /* FTLJSTailCall.h */,
 				0F8F2B93172E049E007DBDA5 /* FTLLink.cpp */,
 				0F8F2B94172E049E007DBDA5 /* FTLLink.h */,
 				0FCEFADD180738C000472CE4 /* FTLLocation.cpp */,
@@ -6187,6 +6193,7 @@
 				0F2B66F317B6B5AB00A7AE3F /* JSGenericTypedArrayViewConstructorInlines.h in Headers */,
 				0F2B66F417B6B5AB00A7AE3F /* JSGenericTypedArrayViewInlines.h in Headers */,
 				0F5A1274192D9FDF008764A3 /* DFGDoesGC.h in Headers */,
+				62774DAB1B8D4B190006F05A /* FTLJSTailCall.h in Headers */,
 				0F2B66F517B6B5AB00A7AE3F /* JSGenericTypedArrayViewPrototype.h in Headers */,
 				0F2B66F617B6B5AB00A7AE3F /* JSGenericTypedArrayViewPrototypeInlines.h in Headers */,
 				BC18C4210E16F5CD00B34460 /* JSGlobalObject.h in Headers */,
@@ -7496,6 +7503,7 @@
 				142D6F0813539A2800B02E86 /* MarkedBlock.cpp in Sources */,
 				14D2F3DA139F4BE200491031 /* MarkedSpace.cpp in Sources */,
 				142D6F1113539A4100B02E86 /* MarkStack.cpp in Sources */,
+				62774DAA1B8D4B190006F05A /* FTLJSTailCall.cpp in Sources */,
 				14469DDF107EC7E700650446 /* MathObject.cpp in Sources */,
 				90213E3D123A40C200D422F3 /* MemoryStatistics.cpp in Sources */,
 				0FB5467D14F5CFD6002C2989 /* MethodOfGettingAValueProfile.cpp in Sources */,

Modified: branches/jsc-tailcall/Source/_javascript_Core/bytecode/ValueRecovery.h (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/bytecode/ValueRecovery.h	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/bytecode/ValueRecovery.h	2015-08-27 21:13:37 UTC (rev 189049)
@@ -31,6 +31,7 @@
 #if ENABLE(JIT)
 #include "GPRInfo.h"
 #include "FPRInfo.h"
+#include "Reg.h"
 #endif
 #include "JSCJSValue.h"
 #include "MacroAssembler.h"
@@ -84,6 +85,17 @@
     bool isSet() const { return m_technique != DontKnow; }
     bool operator!() const { return !isSet(); }
     explicit operator bool() const { return isSet(); }
+
+#if ENABLE(JIT)
+    static ValueRecovery inRegister(Reg reg, DataFormat dataFormat)
+    {
+        if (reg.isGPR())
+            return inGPR(reg.gpr(), dataFormat);
+
+        ASSERT(reg.isFPR());
+        return inFPR(reg.fpr(), dataFormat);
+    }
+#endif
     
     static ValueRecovery inGPR(MacroAssembler::RegisterID gpr, DataFormat dataFormat)
     {

Modified: branches/jsc-tailcall/Source/_javascript_Core/dfg/DFGNode.h (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/dfg/DFGNode.h	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/dfg/DFGNode.h	2015-08-27 21:13:37 UTC (rev 189049)
@@ -1106,6 +1106,19 @@
         }
     }
 
+    bool isFunctionTerminal()
+    {
+        switch (op()) {
+        case Return:
+        case TailCall:
+        case TailCallVarargs:
+        case TailCallForwardVarargs:
+            return true;
+        default:
+            return false;
+        }
+    }
+
     unsigned targetBytecodeOffsetDuringParsing()
     {
         ASSERT(isJump());

Modified: branches/jsc-tailcall/Source/_javascript_Core/dfg/DFGTierUpCheckInjectionPhase.cpp (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/dfg/DFGTierUpCheckInjectionPhase.cpp	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/dfg/DFGTierUpCheckInjectionPhase.cpp	2015-08-27 21:13:37 UTC (rev 189049)
@@ -92,7 +92,7 @@
             }
             
             NodeAndIndex terminal = block->findTerminal();
-            if (terminal.node->op() == Return) {
+            if (terminal.node->isFunctionTerminal()) {
                 insertionSet.insertNode(
                     terminal.index, SpecNone, CheckTierUpAtReturn, terminal.node->origin);
             }

Modified: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLCapabilities.cpp (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLCapabilities.cpp	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLCapabilities.cpp	2015-08-27 21:13:37 UTC (rev 189049)
@@ -125,10 +125,16 @@
     case NotifyWrite:
     case StoreBarrier:
     case Call:
+    case TailCall:
+    case TailCallInlinedCaller:
     case Construct:
     case CallVarargs:
+    case TailCallVarargs:
+    case TailCallVarargsInlinedCaller:
+    case ConstructVarargs:
     case CallForwardVarargs:
-    case ConstructVarargs:
+    case TailCallForwardVarargs:
+    case TailCallForwardVarargsInlinedCaller:
     case ConstructForwardVarargs:
     case LoadVarargs:
     case NativeCall:

Modified: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLCompile.cpp (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLCompile.cpp	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLCompile.cpp	2015-08-27 21:13:37 UTC (rev 189049)
@@ -615,6 +615,22 @@
             call.link(vm, linkBuffer, state.finalizer->handleExceptionsLinkBuffer->entrypoint());
         });
     }
+
+    adjustCallICsForStackmaps(state.jsTailCalls, recordMap);
+
+    for (unsigned i = state.jsTailCalls.size(); i--;) {
+        JSTailCall& call = state.jsTailCalls[i];
+
+        CCallHelpers fastPathJIT(&vm, codeBlock);
+        call.emit(*state.jitCode.get(), fastPathJIT);
+
+        char* startOfIC = bitwise_cast<char*>(generatedFunction) + call.m_instructionOffset;
+        size_t sizeOfIC = call.estimatedSize();
+
+        generateInlineIfPossibleOutOfLineIfNot(state, vm, codeBlock, fastPathJIT, startOfIC, sizeOfIC, "tail call inline cache", [&] (LinkBuffer& linkBuffer, CCallHelpers&, bool) {
+            call.link(vm, linkBuffer);
+        });
+    }
     
     RepatchBuffer repatchBuffer(codeBlock);
 

Modified: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCall.cpp (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCall.cpp	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCall.cpp	2015-08-27 21:13:37 UTC (rev 189049)
@@ -48,7 +48,7 @@
     , m_stackmapID(stackmapID)
     , m_instructionOffset(0)
 {
-    ASSERT(node->op() == Call || node->op() == Construct);
+    ASSERT(node->op() == Call || node->op() == Construct || node->op() == TailCallInlinedCaller);
 }
 
 void JSCall::emit(CCallHelpers& jit, unsigned stackSize)

Modified: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCallBase.cpp (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCallBase.cpp	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCallBase.cpp	2015-08-27 21:13:37 UTC (rev 189049)
@@ -65,6 +65,8 @@
     m_slowCall = jit.nearCall();
     
     done.link(&jit);
+
+    m_callLinkInfo->setUpCall(m_type, m_origin, GPRInfo::regT0);
 }
 
 void JSCallBase::link(VM& vm, LinkBuffer& linkBuffer)
@@ -72,9 +74,8 @@
     linkBuffer.link(
         m_slowCall, FunctionPtr(vm.getCTIStub(linkCallThunkGenerator).code().executableAddress()));
 
-    m_callLinkInfo->setUpCallFromFTL(m_type, m_origin, linkBuffer.locationOfNearCall(m_slowCall),
-        linkBuffer.locationOf(m_targetToCheck), linkBuffer.locationOfNearCall(m_fastCall),
-        GPRInfo::regT0);
+    m_callLinkInfo->setCallLocations(linkBuffer.locationOfNearCall(m_slowCall),
+        linkBuffer.locationOf(m_targetToCheck), linkBuffer.locationOfNearCall(m_fastCall));
 }
 
 } } // namespace JSC::FTL

Modified: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCallBase.h (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCallBase.h	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCallBase.h	2015-08-27 21:13:37 UTC (rev 189049)
@@ -50,7 +50,7 @@
     void emit(CCallHelpers&);
     void link(VM&, LinkBuffer&);
     
-private:
+protected:
     CallLinkInfo::CallType m_type;
     CodeOrigin m_origin;
     CCallHelpers::DataLabelPtr m_targetToCheck;

Modified: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCallVarargs.cpp (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCallVarargs.cpp	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCallVarargs.cpp	2015-08-27 21:13:37 UTC (rev 189049)
@@ -57,6 +57,7 @@
 {
     ASSERT(
         node->op() == CallVarargs || node->op() == CallForwardVarargs
+        || node->op() == TailCallVarargsInlinedCaller || node->op() == TailCallForwardVarargsInlinedCaller
         || node->op() == ConstructVarargs || node->op() == ConstructForwardVarargs);
 }
 
@@ -83,11 +84,13 @@
     
     switch (m_node->op()) {
     case CallVarargs:
+    case TailCallVarargsInlinedCaller:
     case ConstructVarargs:
         argumentsGPR = GPRInfo::argumentGPR1;
         thisGPR = GPRInfo::argumentGPR2;
         break;
     case CallForwardVarargs:
+    case TailCallForwardVarargsInlinedCaller:
     case ConstructForwardVarargs:
         thisGPR = GPRInfo::argumentGPR1;
         forwarding = true;

Added: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSTailCall.cpp (0 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSTailCall.cpp	                        (rev 0)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSTailCall.cpp	2015-08-27 21:13:37 UTC (rev 189049)
@@ -0,0 +1,319 @@
+/*
+ * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */
+
+#include "config.h"
+#include "FTLJSTailCall.h"
+
+#if ENABLE(FTL_JIT)
+
+#include "CallFrameShuffler.h"
+#include "DFGNode.h"
+#include "LinkBuffer.h"
+#include "FTLJITCode.h"
+#include "FTLLocation.h"
+#include "FTLStackMaps.h"
+
+namespace JSC { namespace FTL {
+
+using namespace DFG;
+
+static FTL::Location getRegisterWithAddend(const ExitValue& value, StackMaps::Record& record, StackMaps& stackmaps)
+{
+    if (value.kind() != ExitValueArgument)
+        return { };
+
+    auto location =
+        FTL::Location::forStackmaps(&stackmaps, record.locations[value.exitArgument().argument()]);
+
+    if (location.kind() != Location::Register || !location.addend())
+        return { };
+
+    RELEASE_ASSERT(location.isGPR());
+    return location;
+}
+
+static ValueRecovery recoveryFor(const ExitValue& value, StackMaps::Record& record, StackMaps& stackmaps)
+{
+    switch (value.kind()) {
+    case ExitValueConstant:
+        return ValueRecovery::constant(value.constant());
+
+    case ExitValueArgument: {
+        auto location =
+            FTL::Location::forStackmaps(&stackmaps, record.locations[value.exitArgument().argument()]);
+        auto format = value.exitArgument().format();
+
+        switch (location.kind()) {
+        case Location::Register:
+            // We handle the addend outside
+            return ValueRecovery::inRegister(location.dwarfReg().reg(), format);
+
+        case Location::Indirect:
+            // Oh LLVM, you crazy...
+            RELEASE_ASSERT(location.dwarfReg().reg() == Reg(MacroAssembler::framePointerRegister));
+            RELEASE_ASSERT(!(location.offset() % sizeof(void*)));
+            return ValueRecovery::displacedInJSStack(VirtualRegister { static_cast<int>(location.offset() / sizeof(void*)) }, format);
+
+        case Location::Constant:
+            return ValueRecovery::constant(JSValue::decode(location.constant()));
+
+        default:
+            RELEASE_ASSERT_NOT_REACHED();
+        }
+    }
+
+    case ExitValueInJSStack:
+        return ValueRecovery::displacedInJSStack(value.virtualRegister(), DataFormatJS);
+
+    case ExitValueInJSStackAsInt32:
+        return ValueRecovery::displacedInJSStack(value.virtualRegister(), DataFormatInt32);
+
+    case ExitValueInJSStackAsInt52:
+        return ValueRecovery::displacedInJSStack(value.virtualRegister(), DataFormatInt52);
+
+    case ExitValueInJSStackAsDouble:
+        return ValueRecovery::displacedInJSStack(value.virtualRegister(), DataFormatDouble);
+
+    default:
+        RELEASE_ASSERT_NOT_REACHED();
+    }
+}
+
+// This computes an estimated size (in bits) for the sequence of
+// instructions required to load, box, and store a value of a given
+// type, assuming no spilling is required.
+static uint32_t sizeFor(DataFormat format)
+{
+    switch (format) {
+    case DataFormatInt32:
+        // Boxing is zero-extending and tagging
+#if CPU(X86_64)
+        return 48 + sizeFor(DataFormatJS);
+#else
+        return sizeOfZeroExtend32 + sizeOfOrImm64 + sizeFor(DataFormatJS);
+#endif
+
+    case DataFormatInt52:
+        // Boxing is first a conversion to StrictInt52, then
+        // StrictInt52 boxing
+#if CPU(X86_64)
+        return 16 + sizeFor(DataFormatStrictInt52);
+#else
+        return sizeOfShiftImm32 + sizeFor(DataFormatStrictInt52);
+#endif
+
+    case DataFormatStrictInt52:
+        // Boxing is first a conversion to double, then double boxing
+#if CPU(X86_64)
+        return 16 + sizeFor(DataFormatDouble);
+#else
+        return sizeOfConvertInt64ToDouble + sizeFor(DataFormatDouble);
+#endif
+
+    case DataFormatDouble:
+        // Boxing is purifying, moving to a GPR, and tagging
+#if CPU(X86_64)
+        return 96 + sizeFor(DataFormatJS);
+#else
+        return sizeOfPurifyNaN + sizeOfSubImm64 + sizeOfMoveDoubleTo64 + sizeFor(DataFormatJS);
+#endif
+
+    case DataFormatBoolean:
+        // Boxing is adding ValueFalse
+#if CPU(X86_64)
+        return 16 + sizeFor(DataFormatJS);
+#else
+        return sizeOfAddImm32 + sizeFor(DataFormatJS);
+#endif
+
+    case DataFormatJS:
+        // We will load (in a GPR or FPR) then store the value
+#if CPU(X86_64)
+        return 32;
+#else
+        return sizeOfLoad + sizeOfStore;
+#endif
+
+    default:
+        RELEASE_ASSERT_NOT_REACHED();
+    }
+}
+
+JSTailCall::JSTailCall(unsigned stackmapID, Node* node, ExitArgumentList exitArguments,
+    ExitValue callee, Vector<ExitValue> arguments)
+    : JSCallBase(CallLinkInfo::TailCall, node->origin.semantic)
+    , m_stackmapID(stackmapID)
+    , m_callee { WTF::move(callee) }
+    , m_arguments { WTF::move(arguments) }
+    , m_instructionOffset(0)
+    , m_exitArguments { WTF::move(exitArguments) }
+{
+    ASSERT(node->op() == TailCall);
+    ASSERT(numArguments() == node->numChildren() - 1);
+
+    // Estimate the size of the inline cache, assuming that every
+    // value goes from the stack to the stack (in practice, this will
+    // seldom be true, giving us some amount of leeway) and that no
+    // spilling will occur (in practice, this will almost always be
+    // true).
+
+    // We first compute the new frame base and load the fp/lr
+    // registers final values. On debug builds, we also need to
+    // account for the fp-sp delta check (twice: fast and slow path).
+#if CPU(X86_64)
+    m_estimatedSize = 224;
+#if !ASSERT_DISABLED
+    m_estimatedSize += 192;
+#  endif
+#elif CPU(ARM64)
+    m_estimatedSize = 100;
+#if !ASSERT_DISABLED
+    m_estimatedSize += 50;
+#  endif
+#else
+    UNREACHABLE_FOR_PLATFORM();
+#endif
+
+    // The callee will probably be loaded only once since we need it
+    // for the slow path check, but it'll be stored twice. Arguments
+    // will probably be loaded & stored twice (fast & slow)
+    m_estimatedSize += 2 * sizeFor(m_callee.dataFormat());
+    for (ExitValue& arg : m_arguments)
+        m_estimatedSize += 2 * sizeFor(arg.dataFormat());
+
+    // We also have the slow path check, the two calls, and the
+    // CallLinkInfo load for the slow path
+#if CPU(X86_64)
+    m_estimatedSize += 80;
+#else
+    m_estimatedSize += sizeOfCall + sizeOfJump + sizeOfLoad + sizeOfSlowPathCheck;
+#endif
+}
+
+void JSTailCall::emit(JITCode& jitCode, CCallHelpers& jit)
+{
+    StackMaps::Record* record { nullptr };
+    
+    for (unsigned i = jitCode.stackmaps.records.size(); i--;) {
+        record = &jitCode.stackmaps.records[i];
+        if (record->patchpointID == m_stackmapID)
+            break;
+    }
+
+    RELEASE_ASSERT(record->patchpointID == m_stackmapID);
+
+    m_callLinkInfo = jit.codeBlock()->addCallLinkInfo();
+
+    CallFrameShuffleData shuffleData;
+
+    // LLVM has this awful habit of handing us register locations with a
+    // non-zero addend; we collect those here so we can materialize each
+    // (register + addend) value into a dedicated spill slot below.
+
+    HashMap<Reg, Vector<std::pair<ValueRecovery*, int32_t>>> withAddend;
+    size_t numAddends { 0 };
+    shuffleData.callee = recoveryFor(m_callee, *record, jitCode.stackmaps);
+    if (FTL::Location addend = getRegisterWithAddend(m_callee, *record, jitCode.stackmaps)) {
+        withAddend.add(
+            addend.dwarfReg().reg(),
+            Vector<std::pair<ValueRecovery*, int32_t>>()).iterator->value.append(
+                std::make_pair(&shuffleData.callee, addend.addend()));
+        numAddends++;
+    }
+
+    for (size_t i = 0; i < numArguments(); ++i) {
+        shuffleData.args[i] = recoveryFor(m_arguments[i], *record, jitCode.stackmaps);
+        if (FTL::Location addend = getRegisterWithAddend(m_arguments[i], *record, jitCode.stackmaps)) {
+            withAddend.add(
+                addend.dwarfReg().reg(),
+                Vector<std::pair<ValueRecovery*, int32_t>>()).iterator->value.append(
+                    std::make_pair(&shuffleData.args[i], addend.addend()));
+            numAddends++;
+        }
+    }
+
+    numAddends = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), numAddends);
+
+    shuffleData.numLocals = static_cast<int64_t>(jitCode.stackmaps.stackSize()) / sizeof(void*) - 1 + numAddends;
+
+    ASSERT(!numAddends == withAddend.isEmpty());
+
+    if (!withAddend.isEmpty()) {
+        jit.subPtr(MacroAssembler::TrustedImm32(numAddends * sizeof(void*)), MacroAssembler::stackPointerRegister);
+        VirtualRegister spillBase { 1 - static_cast<int>(shuffleData.numLocals) };
+        for (auto entry : withAddend) {
+            for (auto pair : entry.value) {
+                ASSERT(numAddends > 0);
+                VirtualRegister spillSlot { spillBase + --numAddends };
+                ASSERT(entry.key.isGPR());
+                jit.addPtr(MacroAssembler::TrustedImm32(pair.second), entry.key.gpr());
+                jit.storePtr(entry.key.gpr(), CCallHelpers::addressFor(spillSlot));
+                jit.subPtr(MacroAssembler::TrustedImm32(pair.second), entry.key.gpr());
+                *pair.first = ValueRecovery::displacedInJSStack(spillSlot, pair.first->dataFormat());
+            }
+        }
+        ASSERT(numAddends < stackAlignmentRegisters());
+    }
+
+    shuffleData.args.resize(numArguments());
+    for (size_t i = 0; i < numArguments(); ++i)
+        shuffleData.args[i] = recoveryFor(m_arguments[i], *record, jitCode.stackmaps);
+
+    shuffleData.setupCalleeSaveRegisters(jit.codeBlock());
+
+    CallFrameShuffler fastPathShuffler(jit, shuffleData);
+
+    fastPathShuffler.pickCalleeJSValueRegs();
+    GPRReg calleeGPR { fastPathShuffler.calleeJSValueRegs().gpr() };
+    fastPathShuffler.restoreGPR(calleeGPR);
+
+    CCallHelpers::Jump slowPath = jit.branchPtrWithPatch(
+        CCallHelpers::NotEqual, calleeGPR, m_targetToCheck,
+        CCallHelpers::TrustedImmPtr(0));
+
+    fastPathShuffler.clearCalleeJSValueRegs();
+    shuffleData = fastPathShuffler.snapshot();
+    m_callLinkInfo->setFrameShuffleData(shuffleData);
+    fastPathShuffler.prepareForTailCall();
+
+    m_fastCall = jit.nearTailCall();
+
+    slowPath.link(&jit);
+
+    CallFrameShuffler slowPathShuffler(jit, shuffleData);
+    slowPathShuffler.setCalleeJSValueRegs(JSValueRegs { GPRInfo::regT0 });
+    slowPathShuffler.prepareForSlowPath();
+
+    jit.move(CCallHelpers::TrustedImmPtr(m_callLinkInfo), GPRInfo::regT2);
+
+    m_slowCall = jit.nearCall();
+
+    jit.abortWithReason(JITDidReturnFromTailCall);
+
+    m_callLinkInfo->setUpCall(m_type, m_origin, calleeGPR);
+}
+
+} } // namespace JSC::FTL
+
+#endif // ENABLE(FTL_JIT)
+

Copied: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSTailCall.h (from rev 189036, branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSCallBase.h) (0 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSTailCall.h	                        (rev 0)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLJSTailCall.h	2015-08-27 21:13:37 UTC (rev 189049)
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */
+
+#ifndef FTLJSTailCall_h
+#define FTLJSTailCall_h
+
+#if ENABLE(FTL_JIT)
+
+#include "FTLExitArgumentList.h"
+#include "FTLExitValue.h"
+#include "FTLJSCallBase.h"
+
+namespace JSC {
+
+namespace DFG {
+struct Node;
+}
+
+namespace FTL {
+
+class JSTailCall : public JSCallBase {
+public:
+    JSTailCall(unsigned stackmapID, DFG::Node*, ExitArgumentList, ExitValue callee, Vector<ExitValue> arguments);
+
+    void emit(JITCode& jitCode, CCallHelpers&);
+    
+    unsigned stackmapID() const { return m_stackmapID; }
+
+    unsigned estimatedSize() const { return m_estimatedSize; }
+
+    unsigned numArguments() const { return m_arguments.size(); }
+
+    bool operator<(const JSTailCall& other) const
+    {
+        return m_instructionOffset < other.m_instructionOffset;
+    }
+    
+private:
+    unsigned m_stackmapID;
+    ExitValue m_callee;
+    Vector<ExitValue> m_arguments;
+    unsigned m_estimatedSize;
+
+public:
+    uint32_t m_instructionOffset;
+    ExitArgumentList m_exitArguments;
+};
+
+} } // namespace JSC::FTL
+
+#endif // ENABLE(FTL_JIT)
+
+#endif // FTLJSTailCall_h
+

Modified: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLLocation.h (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLLocation.h	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLLocation.h	2015-08-27 21:13:37 UTC (rev 189049)
@@ -120,7 +120,9 @@
         return u.constant;
     }
     
-    bool operator!() const { return kind() == Unprocessed && !u.variable.offset; }
+    explicit operator bool() const { return kind() != Unprocessed || u.variable.offset; }
+
+    bool operator!() const { return !static_cast<bool>(*this); }
     
     bool isHashTableDeletedValue() const { return kind() == Unprocessed && u.variable.offset; }
     

Modified: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLLowerDFGToLLVM.cpp (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLLowerDFGToLLVM.cpp	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLLowerDFGToLLVM.cpp	2015-08-27 21:13:37 UTC (rev 189049)
@@ -742,11 +742,17 @@
             compileLogicalNot();
             break;
         case Call:
+        case TailCallInlinedCaller:
         case Construct:
             compileCallOrConstruct();
             break;
+        case TailCall:
+            compileTailCall();
+            break;
         case CallVarargs:
         case CallForwardVarargs:
+        case TailCallVarargsInlinedCaller:
+        case TailCallForwardVarargsInlinedCaller:
         case ConstructVarargs:
         case ConstructForwardVarargs:
             compileCallOrConstructVarargs();
@@ -4353,6 +4359,31 @@
         
         setJSValue(call);
     }
+
+    void compileTailCall()
+    {
+        int numArgs = m_node->numChildren() - 1;
+        ExitArgumentList exitArguments;
+        ExitValue callee =
+            exitValueForTailCall(exitArguments, m_graph.varArgChild(m_node, 0).node());
+        Vector<ExitValue> callArguments(numArgs);
+        
+        for (int i = 0; i < numArgs; ++i) {
+            callArguments[i] =
+                exitValueForTailCall(exitArguments, m_graph.varArgChild(m_node, 1 + i).node());
+        }
+
+        JSTailCall tailCall(m_stackmapIDs++, m_node,
+            WTF::move(exitArguments), WTF::move(callee), WTF::move(callArguments));
+
+        tailCall.m_exitArguments.insert(0, m_out.constInt32(tailCall.estimatedSize()));
+        tailCall.m_exitArguments.insert(0, m_out.constInt64(tailCall.stackmapID()));
+
+        m_out.call(m_out.stackmapIntrinsic(), tailCall.m_exitArguments);
+        m_out.unreachable();
+
+        m_ftlState.jsTailCalls.append(tailCall);
+    }
     
     void compileCallOrConstructVarargs()
     {
@@ -4363,10 +4394,12 @@
         
         switch (m_node->op()) {
         case CallVarargs:
+        case TailCallVarargsInlinedCaller:
         case ConstructVarargs:
             jsArguments = lowJSValue(m_node->child2());
             break;
         case CallForwardVarargs:
+        case TailCallForwardVarargsInlinedCaller:
         case ConstructForwardVarargs:
             break;
         default:
@@ -8140,7 +8173,14 @@
     }
     void callPreflight()
     {
-        callPreflight(m_node->origin.semantic);
+        CodeOrigin codeOrigin = m_node->origin.semantic;
+
+        if (m_node->op() == TailCallInlinedCaller
+            || m_node->op() == TailCallVarargsInlinedCaller
+            || m_node->op() == TailCallForwardVarargsInlinedCaller)
+            codeOrigin = *codeOrigin.inlineCallFrame->getCallerSkippingDeadFrames();
+
+        callPreflight(codeOrigin);
     }
     
     void callCheck()
@@ -8401,13 +8441,46 @@
         DFG_CRASH(m_graph, m_node, toCString("Cannot find value for node: ", node).data());
         return ExitValue::dead();
     }
-    
+
     ExitValue exitArgument(ExitArgumentList& arguments, DataFormat format, LValue value)
     {
         ExitValue result = ExitValue::exitArgument(ExitArgument(format, arguments.size()));
         arguments.append(value);
         return result;
     }
+
+    ExitValue exitValueForTailCall(ExitArgumentList& arguments, Node* node)
+    {
+        ASSERT(node->shouldGenerate());
+        ASSERT(node->hasResult());
+
+        switch (node->op()) {
+        case JSConstant:
+        case Int52Constant:
+        case DoubleConstant:
+                return ExitValue::constant(node->asJSValue());
+
+        default:
+                break;
+        }
+
+        LoweredNodeValue value = m_jsValueValues.get(node);
+        if (isValid(value))
+            return exitArgument(arguments, DataFormatJS, value.value());
+
+        value = m_int32Values.get(node);
+        if (isValid(value))
+            return exitArgument(arguments, DataFormatInt32, value.value());
+
+        value = m_booleanValues.get(node);
+        if (isValid(value)) {
+            LValue valueToPass = m_out.zeroExt(value.value(), m_out.int32);
+            return exitArgument(arguments, DataFormatBoolean, valueToPass);
+        }
+
+        // Doubles and Int52 have been converted by ValueRep()
+        DFG_CRASH(m_graph, m_node, toCString("Cannot find value for node: ", node).data());
+    }
     
     bool doesKill(Edge edge)
     {

Modified: branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLState.h (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLState.h	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/ftl/FTLState.h	2015-08-27 21:13:37 UTC (rev 189049)
@@ -37,6 +37,7 @@
 #include "FTLJITFinalizer.h"
 #include "FTLJSCall.h"
 #include "FTLJSCallVarargs.h"
+#include "FTLJSTailCall.h"
 #include "FTLStackMaps.h"
 #include "FTLState.h"
 #include <wtf/Noncopyable.h>
@@ -79,6 +80,7 @@
     SegmentedVector<CheckInDescriptor> checkIns;
     Vector<JSCall> jsCalls;
     Vector<JSCallVarargs> jsCallVarargses;
+    Vector<JSTailCall> jsTailCalls;
     Vector<CString> codeSectionNames;
     Vector<CString> dataSectionNames;
     void* unwindDataSection;

Modified: branches/jsc-tailcall/Source/_javascript_Core/jit/CallFrameShuffler.cpp (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/jit/CallFrameShuffler.cpp	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/jit/CallFrameShuffler.cpp	2015-08-27 21:13:37 UTC (rev 189049)
@@ -248,17 +248,26 @@
     ASSERT(cachedRecovery.recovery().isInRegisters());
 
     VirtualRegister spillSlot { 0 };
-    for (VirtualRegister slot = firstOld(); slot <= lastOld(); slot -= 1) {
-        ASSERT(slot < newAsOld(firstNew()));
+    for (VirtualRegister slot = firstOld(); slot >= lastOld(); slot += 1) {
+        if (slot >= newAsOld(firstNew()))
+            break;
+
         if (getOld(slot))
             continue;
 
         spillSlot = slot;
         break;
     }
-    // We must have enough slots to be able to fit the whole
-    // callee's frame for the slow path.
-    RELEASE_ASSERT(spillSlot.isLocal());
+    // We must have enough slots to be able to fit the whole callee's
+    // frame for the slow path - unless we are in the FTL. In that
+    // case, we are allowed to extend the frame *once*, since we are
+    // guaranteed to have enough available space for that.
+    if (spillSlot >= newAsOld(firstNew()) || !spillSlot.isLocal()) {
+        RELEASE_ASSERT(!m_didExtendFrame);
+        extendFrameIfNeeded();
+        spill(cachedRecovery);
+        return;
+    }
 
     if (verbose)
         dataLog("   * Spilling ", cachedRecovery.recovery(), " into ", spillSlot, "\n");
@@ -288,11 +297,40 @@
         dataLog("  Skipping the fp-sp delta check since there is too much pressure");
 }
 
+void CallFrameShuffler::extendFrameIfNeeded()
+{
+    ASSERT(!m_didExtendFrame);
+    ASSERT(!isSlowPath());
+
+    VirtualRegister lastRead { lastOld() };
+    for (; lastRead >= firstOld(); lastRead -= 1) {
+        if (getOld(lastRead))
+            break;
+    }
+    size_t availableSize = static_cast<size_t>(lastOld().offset() - lastRead.offset());
+
+    if (availableSize < m_newFrame.size()) {
+        size_t delta = m_newFrame.size() - availableSize;
+        m_oldFrame.grow(m_oldFrame.size() + delta);
+        m_jit.subPtr(MacroAssembler::TrustedImm32(delta), MacroAssembler::stackPointerRegister);
+
+        if (!isUndecided())
+            m_oldFrameOffset = numLocals();
+    }
+
+    m_didExtendFrame = true;
+}
+
 void CallFrameShuffler::prepareForSlowPath()
 {
     ASSERT(isUndecided());
     emitDeltaCheck();
 
+    // When coming from the FTL, we need to extend the frame. In other
+    // cases, we may end up extending the frame if we previously
+    // spilled things (e.g. in polymorphic cache).
+    extendFrameIfNeeded();
+
     m_frameDelta = numLocals() + JSStack::CallerFrameAndPCSize;
     m_newFrameBase = MacroAssembler::stackPointerRegister;
     m_newFrameOffset = -JSStack::CallerFrameAndPCSize;

Modified: branches/jsc-tailcall/Source/_javascript_Core/jit/CallFrameShuffler.h (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/jit/CallFrameShuffler.h	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/jit/CallFrameShuffler.h	2015-08-27 21:13:37 UTC (rev 189049)
@@ -160,6 +160,21 @@
         m_lockedRegisters.clear(gpr);
     }
 
+    void restoreGPR(GPRReg gpr)
+    {
+        if (!m_newRegisters[gpr])
+            return;
+
+        ensureGPR();
+#if USE(JSVALUE32_64)
+        GPRReg tempGPR { getFreeGPR() };
+        lockGPR(tempGPR);
+        ensureGPR();
+        releaseGPR(tempGPR);
+#endif
+        emitDisplace(*m_newRegisters[gpr]);
+    }
+
     // You can only take a snapshot if the recovery has not started
     // yet. The only operations that are valid before taking a
     // snapshot are lockGPR(), acquireGPR() and releaseGPR().
@@ -192,6 +207,48 @@
         return WTF::move(data);
     }
 
+#if ENABLE(FTL_JIT)
+    void pickCalleeJSValueRegs()
+    {
+        ASSERT(isUndecided());
+        CachedRecovery* cachedRecovery { getNew(VirtualRegister(JSStack::Callee)) };
+        ASSERT(cachedRecovery);
+        if (cachedRecovery->wantedJSValueRegs())
+            return;
+
+        if (cachedRecovery->recovery().isInGPR()) {
+            GPRReg current { cachedRecovery->recovery().gpr() };
+            if (!m_newRegisters[current]) {
+                m_newRegisters[current] = cachedRecovery;
+                cachedRecovery->setWantedJSValueRegs(JSValueRegs { current });
+                return;
+            }
+        }
+
+        ensureTempGPR();
+        GPRReg gpr { getFreeTempGPR() };
+        m_newRegisters[gpr] = cachedRecovery;
+        cachedRecovery->setWantedJSValueRegs(JSValueRegs { gpr });
+    }
+
+    void clearCalleeJSValueRegs()
+    {
+        ASSERT(isUndecided());
+        CachedRecovery* cachedRecovery { getNew(VirtualRegister(JSStack::Callee)) };
+        ASSERT(cachedRecovery);
+        if (!cachedRecovery->wantedJSValueRegs())
+            return;
+
+        m_newRegisters[cachedRecovery->wantedJSValueRegs().gpr()] = nullptr;
+        cachedRecovery->setWantedJSValueRegs(JSValueRegs());
+    }
+
+    JSValueRegs calleeJSValueRegs() const
+    {
+        return getNew(VirtualRegister(JSStack::Callee))->wantedJSValueRegs();
+    }
+#endif
+
     // Ask the shuffler to put the callee into some registers once the
     // shuffling is done. You should call this before any of the
     // prepare() methods, and must not take a snapshot afterwards, as
@@ -396,6 +453,10 @@
         return reg >= firstOld() && reg <= lastOld();
     }
 
+    bool m_didExtendFrame { false };
+
+    void extendFrameIfNeeded();
+
     // This stores, for each slot in the new frame, information about
     // the recovery for the value that should eventually go into that
     // slot.
@@ -514,6 +575,14 @@
         return nonTemp;
     }
 
+    GPRReg getFreeTempGPR() const
+    {
+        Reg freeTempGPR { getFreeRegister([this] (Reg reg) { return reg.isGPR() && !m_newRegisters[reg]; }) };
+        if (!freeTempGPR)
+            return InvalidGPRReg;
+        return freeTempGPR.gpr();
+    }
+
     GPRReg getFreeGPR() const
     {
         Reg freeGPR { getFreeRegister([] (Reg reg) { return reg.isGPR(); }) };
@@ -609,6 +678,31 @@
             });
     }
 
+    void ensureTempGPR()
+    {
+        if (getFreeTempGPR() != InvalidGPRReg)
+            return;
+
+        if (verbose)
+            dataLog("  Finding a temp GPR to spill\n");
+        ensureRegister(
+            [this] (const CachedRecovery& cachedRecovery) {
+                if (cachedRecovery.recovery().isInGPR()) {
+                    return !m_lockedRegisters.get(cachedRecovery.recovery().gpr()) 
+                        && !m_newRegisters[cachedRecovery.recovery().gpr()];
+                }
+#if USE(JSVALUE32_64)
+                if (cachedRecovery.recovery().technique() == InPair) {
+                    return !m_lockedRegisters.get(cachedRecovery.recovery().tagGPR())
+                        && !m_lockedRegisters.get(cachedRecovery.recovery().payloadGPR())
+                        && !m_newRegisters[cachedRecovery.recovery().tagGPR()]
+                        && !m_newRegisters[cachedRecovery.recovery().payloadGPR()];
+                }
+#endif
+                return false;
+            });
+    }
+
     void ensureGPR()
     {
         if (getFreeGPR() != InvalidGPRReg)
@@ -663,16 +757,24 @@
     {
         ASSERT(jsValueRegs && !getNew(jsValueRegs));
         CachedRecovery* cachedRecovery = addCachedRecovery(recovery);
-        ASSERT(!cachedRecovery->wantedJSValueRegs());
-        cachedRecovery->setWantedJSValueRegs(jsValueRegs);
 #if USE(JSVALUE64)
+        if (cachedRecovery->wantedJSValueRegs())
+            m_newRegisters[cachedRecovery->wantedJSValueRegs().gpr()] = nullptr;
         m_newRegisters[jsValueRegs.gpr()] = cachedRecovery;
 #else
+        if (JSValueRegs oldRegs { cachedRecovery->wantedJSValueRegs() }) {
+            if (oldRegs.payloadGPR())
+                m_newRegisters[oldRegs.payloadGPR()] = nullptr;
+            if (oldRegs.tagGPR())
+                m_newRegisters[oldRegs.tagGPR()] = nullptr;
+        }
         if (jsValueRegs.payloadGPR() != InvalidGPRReg)
             m_newRegisters[jsValueRegs.payloadGPR()] = cachedRecovery;
         if (jsValueRegs.tagGPR() != InvalidGPRReg)
             m_newRegisters[jsValueRegs.tagGPR()] = cachedRecovery;
 #endif
+        ASSERT(!cachedRecovery->wantedJSValueRegs());
+        cachedRecovery->setWantedJSValueRegs(jsValueRegs);
     }
 
     // m_oldFrameBase is the register relative to which we access

Modified: branches/jsc-tailcall/Source/_javascript_Core/jit/CallFrameShuffler64.cpp (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/jit/CallFrameShuffler64.cpp	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/jit/CallFrameShuffler64.cpp	2015-08-27 21:13:37 UTC (rev 189049)
@@ -253,7 +253,6 @@
 
 void CallFrameShuffler::emitDisplace(CachedRecovery& cachedRecovery)
 {
-    ASSERT(cachedRecovery.recovery().isInRegisters());
     GPRReg wantedGPR { cachedRecovery.wantedJSValueRegs().gpr() };
     ASSERT(wantedGPR != InvalidGPRReg);
     ASSERT(!m_lockedRegisters.get(wantedGPR));
@@ -313,8 +312,15 @@
     }
     ASSERT(!m_registers[wantedGPR]);
 
-    if (cachedRecovery.recovery().isInGPR()) {
+    if (cachedRecovery.recovery().isConstant()) {
         if (verbose)
+            dataLog("   * Loading ", cachedRecovery.recovery().constant(), " into ", wantedGPR, "\n");
+        m_jit.moveTrustedValue(cachedRecovery.recovery().constant(), JSValueRegs { wantedGPR });
+        updateRecovery(
+            cachedRecovery,
+            ValueRecovery::inGPR(wantedGPR, DataFormatJS));
+    } else if (cachedRecovery.recovery().isInGPR()) {
+        if (verbose)
             dataLog("   * Moving ", cachedRecovery.recovery().gpr(), " into ", wantedGPR, "\n");
         m_jit.move(cachedRecovery.recovery().gpr(), wantedGPR);
         updateRecovery(cachedRecovery,

Modified: branches/jsc-tailcall/Source/_javascript_Core/jit/Reg.h (189048 => 189049)


--- branches/jsc-tailcall/Source/_javascript_Core/jit/Reg.h	2015-08-27 21:02:39 UTC (rev 189048)
+++ branches/jsc-tailcall/Source/_javascript_Core/jit/Reg.h	2015-08-27 21:13:37 UTC (rev 189049)
@@ -55,6 +55,11 @@
         : m_index(invalid())
     {
     }
+
+    Reg(WTF::HashTableDeletedValueType)
+        : m_index(deleted())
+    {
+    }
     
     Reg(MacroAssembler::RegisterID reg)
         : m_index(MacroAssembler::registerIndex(reg))
@@ -102,6 +107,8 @@
     bool isSet() const { return m_index != invalid(); }
     bool operator!() const { return !isSet(); }
     explicit operator bool() const { return isSet(); }
+
+    bool isHashTableDeletedValue() const { return m_index == deleted(); }
     
     bool isGPR() const
     {
@@ -165,12 +172,32 @@
 
 private:
     static uint8_t invalid() { return 0xff; }
+
+    static uint8_t deleted() { return 0xfe; }
     
     uint8_t m_index;
 };
 
+struct RegHash {
+    static unsigned hash(const Reg& key) { return key.hash(); }
+    static bool equal(const Reg& a, const Reg& b) { return a == b; }
+    static const bool safeToCompareToEmptyOrDeleted = true;
+};
+
 } // namespace JSC
 
+namespace WTF {
+
+template<typename T> struct DefaultHash;
+template<> struct DefaultHash<JSC::Reg> {
+    typedef JSC::RegHash Hash;
+};
+
+template<typename T> struct HashTraits;
+template<> struct HashTraits<JSC::Reg> : SimpleClassHashTraits<JSC::Reg> { };
+
+} // namespace WTF
+
 #endif // ENABLE(JIT)
 
 #endif // Reg_h
_______________________________________________
webkit-changes mailing list
[email protected]
https://lists.webkit.org/mailman/listinfo/webkit-changes

Reply via email to