Title: [278587] trunk/Source/JavaScriptCore

Diff

Modified: trunk/Source/JavaScriptCore/ChangeLog (278586 => 278587)


--- trunk/Source/JavaScriptCore/ChangeLog	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/ChangeLog	2021-06-08 01:29:31 UTC (rev 278587)
@@ -1,3 +1,20 @@
+2021-06-07  Commit Queue  <[email protected]>
+
+        Unreviewed, reverting r278371 and r278463.
+        https://bugs.webkit.org/show_bug.cgi?id=226749
+
+        Break dumpDisassembly in JetStream2
+
+        Reverted changesets:
+
+        "We should drop B3 values while running Air"
+        https://bugs.webkit.org/show_bug.cgi?id=226187
+        https://trac.webkit.org/changeset/278371
+
+        "Drop the FTL(DFG) graph after lowering to B3"
+        https://bugs.webkit.org/show_bug.cgi?id=226556
+        https://trac.webkit.org/changeset/278463
+
 2021-06-07  Alexey Shvayka  <[email protected]>
 
         Window should behave like a legacy platform object without indexed setter

Modified: trunk/Source/JavaScriptCore/b3/B3Generate.cpp (278586 => 278587)


--- trunk/Source/JavaScriptCore/b3/B3Generate.cpp	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/b3/B3Generate.cpp	2021-06-08 01:29:31 UTC (rev 278587)
@@ -133,7 +133,6 @@
     }
 
     lowerToAir(procedure);
-    procedure.freeUnneededB3ValuesAfterLowering();
 }
 
 } } // namespace JSC::B3

Modified: trunk/Source/JavaScriptCore/b3/B3PCToOriginMap.h (278586 => 278587)


--- trunk/Source/JavaScriptCore/b3/B3PCToOriginMap.h	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/b3/B3PCToOriginMap.h	2021-06-08 01:29:31 UTC (rev 278587)
@@ -61,7 +61,7 @@
     const Vector<OriginRange>& ranges() const  { return m_ranges; }
 
 private:
-    Vector<OriginRange, 0> m_ranges;
+    Vector<OriginRange> m_ranges;
 };
 
 } } // namespace JSC::B3

Modified: trunk/Source/JavaScriptCore/b3/B3Procedure.cpp (278586 => 278587)


--- trunk/Source/JavaScriptCore/b3/B3Procedure.cpp	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/b3/B3Procedure.cpp	2021-06-08 01:29:31 UTC (rev 278587)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015-2021 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2020 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -434,51 +434,6 @@
     m_code->setNumEntrypoints(numEntrypoints);
 }
 
-void Procedure::freeUnneededB3ValuesAfterLowering()
-{
-    // We cannot clear m_stackSlots() or m_tuples here, as they are unfortunately modified and read respectively by Air.
-    m_variables.clearAll();
-    m_blocks.clear();
-    m_cfg = nullptr;
-    m_dominators = nullptr;
-    m_naturalLoops = nullptr;
-    m_backwardsCFG = nullptr;
-    m_backwardsDominators = nullptr;
-    m_fastConstants.clear();
-
-    if (m_code->shouldPreserveB3Origins())
-        return;
-
-    BitVector valuesToPreserve;
-    valuesToPreserve.ensureSize(m_values.size());
-    for (Value* value : m_values) {
-        switch (value->opcode()) {
-        // Ideally we would also be able to get rid of all of those.
-        // But Air currently relies on these origins being preserved, see https://bugs.webkit.org/show_bug.cgi?id=194040
-        case WasmBoundsCheck:
-            valuesToPreserve.quickSet(value->index());
-            break;
-        case CCall:
-        case Patchpoint:
-        case CheckAdd:
-        case CheckSub:
-        case CheckMul:
-        case Check:
-            valuesToPreserve.quickSet(value->index());
-            for (Value* child : value->children())
-                valuesToPreserve.quickSet(child->index());
-            break;
-        default:
-            break;
-        }
-    }
-    for (Value* value : m_values) {
-        if (!valuesToPreserve.quickGet(value->index()))
-            m_values.remove(value);
-    }
-    m_values.packIndices();
-}
-
 } } // namespace JSC::B3
 
 #endif // ENABLE(B3_JIT)

Modified: trunk/Source/JavaScriptCore/b3/B3Procedure.h (278586 => 278587)


--- trunk/Source/JavaScriptCore/b3/B3Procedure.h	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/b3/B3Procedure.h	2021-06-08 01:29:31 UTC (rev 278587)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015-2021 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -258,11 +258,7 @@
     JS_EXPORT_PRIVATE RegisterAtOffsetList calleeSaveRegisterAtOffsetList() const;
 
     PCToOriginMap& pcToOriginMap() { return m_pcToOriginMap; }
-    PCToOriginMap releasePCToOriginMap()
-    {
-        RELEASE_ASSERT(needsPCToOriginMap());
-        return WTFMove(m_pcToOriginMap);
-    }
+    PCToOriginMap releasePCToOriginMap() { return WTFMove(m_pcToOriginMap); }
 
     JS_EXPORT_PRIVATE void setWasmBoundsCheckGenerator(RefPtr<WasmBoundsCheckGenerator>);
 
@@ -275,11 +271,6 @@
     JS_EXPORT_PRIVATE RegisterSet mutableGPRs();
     JS_EXPORT_PRIVATE RegisterSet mutableFPRs();
 
-    void setNeedsPCToOriginMap() { m_needsPCToOriginMap = true; }
-    bool needsPCToOriginMap() { return m_needsPCToOriginMap; }
-
-    JS_EXPORT_PRIVATE void freeUnneededB3ValuesAfterLowering();
-
 private:
     friend class BlockInsertionSet;
 
@@ -296,6 +287,7 @@
     std::unique_ptr<BackwardsCFG> m_backwardsCFG;
     std::unique_ptr<BackwardsDominators> m_backwardsDominators;
     HashSet<ValueKey> m_fastConstants;
+    unsigned m_numEntrypoints { 1 };
     const char* m_lastPhaseName;
     std::unique_ptr<OpaqueByproducts> m_byproducts;
     std::unique_ptr<Air::Code> m_code;
@@ -302,11 +294,9 @@
     RefPtr<SharedTask<void(PrintStream&, Origin)>> m_originPrinter;
     const void* m_frontendData;
     PCToOriginMap m_pcToOriginMap;
-    unsigned m_numEntrypoints { 1 };
     unsigned m_optLevel { defaultOptLevel() };
     bool m_needsUsedRegisters { true };
     bool m_hasQuirks { false };
-    bool m_needsPCToOriginMap { false };
 };
     
 } } // namespace JSC::B3

Modified: trunk/Source/JavaScriptCore/b3/B3SparseCollection.h (278586 => 278587)


--- trunk/Source/JavaScriptCore/b3/B3SparseCollection.h	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/b3/B3SparseCollection.h	2021-06-08 01:29:31 UTC (rev 278587)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2016-2021 Apple Inc. All rights reserved.
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -111,12 +111,6 @@
         m_vector.shrink(endIndex);
     }
 
-    void clearAll()
-    {
-        m_indexFreeList.clear();
-        m_vector.clear();
-    }
-
     unsigned size() const { return m_vector.size(); }
     bool isEmpty() const { return m_vector.isEmpty(); }
     
@@ -160,8 +154,6 @@
         }
 
     private:
-        friend class SparseCollection;
-
         unsigned findNext(unsigned index)
         {
             while (index < m_collection->size() && !m_collection->at(index))

Modified: trunk/Source/JavaScriptCore/b3/air/AirCode.cpp (278586 => 278587)


--- trunk/Source/JavaScriptCore/b3/air/AirCode.cpp	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/b3/air/AirCode.cpp	2021-06-08 01:29:31 UTC (rev 278587)
@@ -56,7 +56,6 @@
 Code::Code(Procedure& proc)
     : m_proc(proc)
     , m_cfg(new CFG(*this))
-    , m_preserveB3Origins(proc.needsPCToOriginMap() || Options::dumpAirGraphAtEachPhase() || Options::dumpFTLDisassembly())
     , m_lastPhaseName("initial")
     , m_defaultPrologueGenerator(createSharedTask<PrologueGeneratorFunction>(&defaultPrologueGenerator))
 {

Modified: trunk/Source/JavaScriptCore/b3/air/AirCode.h (278586 => 278587)


--- trunk/Source/JavaScriptCore/b3/air/AirCode.h	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/b3/air/AirCode.h	2021-06-08 01:29:31 UTC (rev 278587)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015-2021 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2020 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -354,10 +354,6 @@
     void emitEpilogue(CCallHelpers&);
 
     std::unique_ptr<GenerateAndAllocateRegisters> m_generateAndAllocateRegisters;
-
-    bool shouldPreserveB3Origins() const { return m_preserveB3Origins; }
-
-    void forcePreservationOfB3Origins() { m_preserveB3Origins = true; }
     
 private:
     friend class ::JSC::B3::Procedure;
@@ -394,9 +390,7 @@
     unsigned m_numFPTmps { 0 };
     unsigned m_frameSize { 0 };
     unsigned m_callArgAreaSize { 0 };
-    unsigned m_optLevel { defaultOptLevel() };
     bool m_stackIsAllocated { false };
-    bool m_preserveB3Origins { true };
     RegisterAtOffsetList m_uncorrectedCalleeSaveRegisterAtOffsetList;
     RegisterSet m_calleeSaveRegisters;
     StackSlot* m_calleeSaveStackSlot { nullptr };
@@ -406,6 +400,7 @@
     RefPtr<WasmBoundsCheckGenerator> m_wasmBoundsCheckGenerator;
     const char* m_lastPhaseName;
     std::unique_ptr<Disassembler> m_disassembler;
+    unsigned m_optLevel { defaultOptLevel() };
     Ref<PrologueGenerator> m_defaultPrologueGenerator;
 };
 

Modified: trunk/Source/JavaScriptCore/b3/air/AirGenerate.cpp (278586 => 278587)


--- trunk/Source/JavaScriptCore/b3/air/AirGenerate.cpp	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/b3/air/AirGenerate.cpp	2021-06-08 01:29:31 UTC (rev 278587)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015-2021 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2020 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -214,12 +214,11 @@
 
     PCToOriginMap& pcToOriginMap = code.proc().pcToOriginMap();
     auto addItem = [&] (Inst& inst) {
-        if (!code.shouldPreserveB3Origins())
+        if (!inst.origin) {
+            pcToOriginMap.appendItem(jit.labelIgnoringWatchpoints(), Origin());
             return;
-        if (inst.origin)
-            pcToOriginMap.appendItem(jit.labelIgnoringWatchpoints(), inst.origin->origin());
-        else
-            pcToOriginMap.appendItem(jit.labelIgnoringWatchpoints(), Origin());
+        }
+        pcToOriginMap.appendItem(jit.labelIgnoringWatchpoints(), inst.origin->origin());
     };
 
     Disassembler* disassembler = code.disassembler();

Modified: trunk/Source/JavaScriptCore/b3/testb3_6.cpp (278586 => 278587)


--- trunk/Source/JavaScriptCore/b3/testb3_6.cpp	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/b3/testb3_6.cpp	2021-06-08 01:29:31 UTC (rev 278587)
@@ -2767,8 +2767,6 @@
         root->appendNew<Const32Value>(proc, Origin(), 44),
         ptr);
     root->appendNew<Value>(proc, Return, Origin());
-    // We'll look at the values after compiling
-    proc.code().forcePreservationOfB3Origins();
     compileAndRun<int>(proc);
     unsigned storeCount = 0;
     for (Value* value : proc.values()) {

Modified: trunk/Source/JavaScriptCore/dfg/DFGGraph.cpp (278586 => 278587)


--- trunk/Source/JavaScriptCore/dfg/DFGGraph.cpp	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/dfg/DFGGraph.cpp	2021-06-08 01:29:31 UTC (rev 278587)
@@ -1861,26 +1861,6 @@
     m_cpsCFG = nullptr;
 }
 
-void Graph::freeDFGIRAfterLowering()
-{
-    m_blocks.clear();
-    m_roots.clear();
-    m_varArgChildren.clear();
-    m_nodes.clearAll();
-
-    m_bytecodeLiveness.clear();
-    m_safeToLoad.clear();
-    m_cpsDominators = nullptr;
-    m_ssaDominators = nullptr;
-    m_cpsNaturalLoops = nullptr;
-    m_ssaNaturalLoops = nullptr;
-    m_ssaCFG = nullptr;
-    m_cpsCFG = nullptr;
-    m_backwardsCFG = nullptr;
-    m_backwardsDominators = nullptr;
-    m_controlEquivalenceAnalysis = nullptr;
-}
-
 void Prefix::dump(PrintStream& out) const
 {
     if (!m_enabled)

Modified: trunk/Source/JavaScriptCore/dfg/DFGGraph.h (278586 => 278587)


--- trunk/Source/JavaScriptCore/dfg/DFGGraph.h	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/dfg/DFGGraph.h	2021-06-08 01:29:31 UTC (rev 278587)
@@ -1074,8 +1074,6 @@
         m_catchEntrypoints.append(CatchEntrypointData { machineCode, FixedVector<FlushFormat>(WTFMove(argumentFormats)), bytecodeIndex });
     }
 
-    void freeDFGIRAfterLowering();
-
     StackCheck m_stackChecker;
     VM& m_vm;
     Plan& m_plan;

Modified: trunk/Source/JavaScriptCore/ftl/FTLCompile.cpp (278586 => 278587)


--- trunk/Source/JavaScriptCore/ftl/FTLCompile.cpp	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/ftl/FTLCompile.cpp	2021-06-08 01:29:31 UTC (rev 278587)
@@ -58,9 +58,6 @@
     if (shouldDumpDisassembly())
         state.proc->code().setDisassembler(makeUnique<B3::Air::Disassembler>());
 
-    if (!shouldDumpDisassembly() && !Options::asyncDisassembly() && !graph.compilation() && !state.proc->needsPCToOriginMap())
-        graph.freeDFGIRAfterLowering();
-
     {
         GraphSafepoint safepoint(state.graph, safepointResult);
 
@@ -153,11 +150,10 @@
         state.allocationFailed = true;
         return;
     }
-
-    if (vm.shouldBuilderPCToCodeOriginMapping()) {
-        B3::PCToOriginMap originMap = state.proc->releasePCToOriginMap();
+    
+    B3::PCToOriginMap originMap = state.proc->releasePCToOriginMap();
+    if (vm.shouldBuilderPCToCodeOriginMapping())
         codeBlock->setPCToCodeOriginMap(makeUnique<PCToCodeOriginMap>(PCToCodeOriginMapBuilder(vm, WTFMove(originMap)), *state.finalizer->b3CodeLinkBuffer));
-    }
 
     CodeLocationLabel<JSEntryPtrTag> label = state.finalizer->b3CodeLinkBuffer->locationOf<JSEntryPtrTag>(state.proc->code().entrypointLabel(0));
     state.generatedFunction = label;

Modified: trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp (278586 => 278587)


--- trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp	2021-06-08 01:29:31 UTC (rev 278587)
@@ -2384,7 +2384,6 @@
         patchpoint->numGPScratchRegisters = 1;
         patchpoint->clobber(RegisterSet::macroScratchRegisters());
         State* state = &m_ftlState;
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -2415,12 +2414,12 @@
 #endif
 
                         if (mathICGenerationState->shouldSlowPathRepatch) {
-                            SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, semanticNodeOrigin, exceptions.get(),
-                                repatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr(), CCallHelpers::TrustedImmPtr(mathIC));
+                            SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, node->origin.semantic, exceptions.get(),
+                                repatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr(), CCallHelpers::TrustedImmPtr(mathIC));
                             mathICGenerationState->slowPathCall = call.call();
                         } else {
-                            SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, semanticNodeOrigin,
-                                exceptions.get(), nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr());
+                            SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, node->origin.semantic,
+                                exceptions.get(), nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr());
                             mathICGenerationState->slowPathCall = call.call();
                         }
                         jit.jump().linkTo(done, &jit);
@@ -2439,8 +2438,8 @@
                     });
                 } else {
                     callOperation(
-                        *state, params.unavailableRegisters(), jit, semanticNodeOrigin, exceptions.get(),
-                        nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr());
+                        *state, params.unavailableRegisters(), jit, node->origin.semantic, exceptions.get(),
+                        nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr());
                 }
 
 #if ENABLE(MATH_IC_STATS)
@@ -2492,7 +2491,6 @@
         patchpoint->numFPScratchRegisters = 2;
         patchpoint->clobber(RegisterSet::macroScratchRegisters());
         State* state = &m_ftlState;
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -2526,12 +2524,12 @@
 #endif
 
                         if (mathICGenerationState->shouldSlowPathRepatch) {
-                            SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, semanticNodeOrigin, exceptions.get(),
-                                repatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr(), params[2].gpr(), CCallHelpers::TrustedImmPtr(mathIC));
+                            SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, node->origin.semantic, exceptions.get(),
+                                repatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr(), params[2].gpr(), CCallHelpers::TrustedImmPtr(mathIC));
                             mathICGenerationState->slowPathCall = call.call();
                         } else {
-                            SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, semanticNodeOrigin,
-                                exceptions.get(), nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr(), params[2].gpr());
+                            SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, node->origin.semantic,
+                                exceptions.get(), nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr(), params[2].gpr());
                             mathICGenerationState->slowPathCall = call.call();
                         }
                         jit.jump().linkTo(done, &jit);
@@ -2550,8 +2548,8 @@
                     });
                 } else {
                     callOperation(
-                        *state, params.unavailableRegisters(), jit, semanticNodeOrigin, exceptions.get(),
-                        nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr(), params[2].gpr());
+                        *state, params.unavailableRegisters(), jit, node->origin.semantic, exceptions.get(),
+                        nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr(), params[2].gpr());
                 }
 
 #if ENABLE(MATH_IC_STATS)
@@ -3995,11 +3993,10 @@
 
         State* state = &m_ftlState;
         bool baseIsCell = abstractValue(node->child1()).isType(SpecCell);
-        CodeOrigin nodeSemanticOrigin = node->origin.semantic;
         patchpoint->setGenerator([=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
 
-                CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(nodeSemanticOrigin);
+                CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
 
                 // This is the direct exit target for operation calls.
                 Box<CCallHelpers::JumpList> exceptions = exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -4014,7 +4011,7 @@
                 GPRReg propertyGPR = params[2].gpr();
 
                 auto generator = Box<JITGetByValGenerator>::create(
-                    jit.codeBlock(), nodeSemanticOrigin, callSiteIndex, AccessType::GetPrivateName,
+                    jit.codeBlock(), node->origin.semantic, callSiteIndex, AccessType::GetPrivateName,
                     params.unavailableRegisters(), JSValueRegs(baseGPR), JSValueRegs(propertyGPR), JSValueRegs(resultGPR));
 
                 CCallHelpers::Jump notCell;
@@ -4032,9 +4029,9 @@
                     generator->slowPathJump().link(&jit);
                     CCallHelpers::Label slowPathBegin = jit.label();
                     CCallHelpers::Call slowPathCall = callOperation(
-                        *state, params.unavailableRegisters(), jit, nodeSemanticOrigin,
+                        *state, params.unavailableRegisters(), jit, node->origin.semantic,
                         exceptions.get(), operationGetPrivateNameOptimize, resultGPR,
-                        jit.codeBlock()->globalObjectFor(nodeSemanticOrigin),
+                        jit.codeBlock()->globalObjectFor(node->origin.semantic),
                         CCallHelpers::TrustedImmPtr(generator->stubInfo()), baseGPR, propertyGPR).call();
                     jit.jump().linkTo(done, &jit);
 
@@ -4128,11 +4125,10 @@
 
         State* state = &m_ftlState;
         bool baseIsCell = abstractValue(m_node->child1()).isType(SpecCell);
-        CodeOrigin nodeSemanticOrigin = node->origin.semantic;
         patchpoint->setGenerator([=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
 
-            CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(nodeSemanticOrigin);
+            CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
 
             // This is the direct exit target for operation calls.
             Box<CCallHelpers::JumpList> exceptions = exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -4146,7 +4142,7 @@
             GPRReg brandGPR = params[1].gpr();
 
             auto generator = Box<JITPrivateBrandAccessGenerator>::create(
-                jit.codeBlock(), nodeSemanticOrigin, callSiteIndex, accessType,
+                jit.codeBlock(), node->origin.semantic, callSiteIndex, accessType,
                 params.unavailableRegisters(), JSValueRegs(baseGPR), JSValueRegs(brandGPR));
 
             CCallHelpers::Jump notCell;
@@ -4176,9 +4172,9 @@
                 generator->slowPathJump().link(&jit);
                 CCallHelpers::Label slowPathBegin = jit.label();
                 CCallHelpers::Call slowPathCall = callOperation(
-                    *state, params.unavailableRegisters(), jit, nodeSemanticOrigin,
+                    *state, params.unavailableRegisters(), jit, node->origin.semantic,
                     exceptions.get(), appropriatePrivateAccessFunction(accessType), InvalidGPRReg,
-                    jit.codeBlock()->globalObjectFor(nodeSemanticOrigin),
+                    jit.codeBlock()->globalObjectFor(node->origin.semantic),
                     CCallHelpers::TrustedImmPtr(generator->stubInfo()), baseGPR, brandGPR).call();
                 jit.jump().linkTo(done, &jit);
 
@@ -4538,14 +4534,13 @@
             preparePatchpointForExceptions(patchpoint);
 
         State* state = &m_ftlState;
-
-        CodeOrigin nodeSemanticOrigin = node->origin.semantic;
+        
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
 
                 CallSiteIndex callSiteIndex =
-                    state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(nodeSemanticOrigin);
+                    state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
 
                 Box<CCallHelpers::JumpList> exceptions =
                     exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -4554,7 +4549,7 @@
                 exceptionHandle->scheduleExitCreationForUnwind(params, callSiteIndex);
 
                 auto generator = Box<JITPutByIdGenerator>::create(
-                    jit.codeBlock(), nodeSemanticOrigin, callSiteIndex,
+                    jit.codeBlock(), node->origin.semantic, callSiteIndex,
                     params.unavailableRegisters(), identifier, JSValueRegs(params[0].gpr()),
                     JSValueRegs(params[1].gpr()), GPRInfo::patchpointScratchRegister, ecmaMode,
                     putKind);
@@ -4569,9 +4564,9 @@
                         generator->slowPathJump().link(&jit);
                         CCallHelpers::Label slowPathBegin = jit.label();
                         CCallHelpers::Call slowPathCall = callOperation(
-                            *state, params.unavailableRegisters(), jit, nodeSemanticOrigin,
+                            *state, params.unavailableRegisters(), jit, node->origin.semantic,
                             exceptions.get(), generator->slowPathFunction(), InvalidGPRReg,
-                            jit.codeBlock()->globalObjectFor(nodeSemanticOrigin),
+                            jit.codeBlock()->globalObjectFor(node->origin.semantic),
                             CCallHelpers::TrustedImmPtr(generator->stubInfo()), params[1].gpr(),
                             params[0].gpr(), identifier.rawBits()).call();
                         jit.jump().linkTo(done, &jit);
@@ -5258,11 +5253,10 @@
             RefPtr<PatchpointExceptionHandle> exceptionHandle = preparePatchpointForExceptions(patchpoint);
 
             State* state = &m_ftlState;
-            CodeOrigin nodeSemanticOrigin = node->origin.semantic;
             patchpoint->setGenerator([=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
 
-                CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(nodeSemanticOrigin);
+                CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
 
                 // This is the direct exit target for operation calls.
                 Box<CCallHelpers::JumpList> exceptions = exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -5277,7 +5271,7 @@
                 GPRReg propertyGPR = params[2].gpr();
 
                 auto generator = Box<JITGetByValGenerator>::create(
-                    jit.codeBlock(), nodeSemanticOrigin, callSiteIndex, AccessType::GetByVal,
+                    jit.codeBlock(), node->origin.semantic, callSiteIndex, AccessType::GetByVal,
                     params.unavailableRegisters(), JSValueRegs(baseGPR), JSValueRegs(propertyGPR), JSValueRegs(resultGPR));
 
                 generator->stubInfo()->propertyIsString = propertyIsString;
@@ -5299,9 +5293,9 @@
                     generator->slowPathJump().link(&jit);
                     CCallHelpers::Label slowPathBegin = jit.label();
                     CCallHelpers::Call slowPathCall = callOperation(
-                        *state, params.unavailableRegisters(), jit, nodeSemanticOrigin,
+                        *state, params.unavailableRegisters(), jit, node->origin.semantic,
                         exceptions.get(), operationGetByValOptimize, resultGPR,
-                        jit.codeBlock()->globalObjectFor(nodeSemanticOrigin),
+                        jit.codeBlock()->globalObjectFor(node->origin.semantic),
                         CCallHelpers::TrustedImmPtr(generator->stubInfo()), CCallHelpers::TrustedImmPtr(nullptr), baseGPR, propertyGPR).call();
                     jit.jump().linkTo(done, &jit);
 
@@ -5859,16 +5853,12 @@
 
         State* state = &m_ftlState;
         Node* node = m_node;
-        CodeOrigin nodeSemanticOrigin = node->origin.semantic;
-        auto child1UseKind = node->child1().useKind();
-        auto child2UseKind = node->child2().useKind();
-        auto ecmaMode = node->ecmaMode().value();
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
 
                 CallSiteIndex callSiteIndex =
-                    state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(nodeSemanticOrigin);
+                    state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
 
                 Box<CCallHelpers::JumpList> exceptions =
                     exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -5880,7 +5870,7 @@
                 ASSERT(base.gpr() != params.gpScratch(0));
                 ASSERT(returnGPR != params.gpScratch(0));
 
-                if (child1UseKind)
+                if (node->child1().useKind() == UntypedUse)
                     slowCases.append(jit.branchIfNotCell(base));
 
                 constexpr auto optimizationFunction = [&] () {
@@ -5895,7 +5885,7 @@
                         return CCallHelpers::TrustedImmPtr(subscriptValue.rawBits());
                     else {
                         ASSERT(params.gpScratch(0) != params[2].gpr());
-                        if (child2UseKind == UntypedUse)
+                        if (node->child2().useKind() == UntypedUse)
                             slowCases.append(jit.branchIfNotCell(JSValueRegs(params[2].gpr())));
                         return JSValueRegs(params[2].gpr());
                     }
@@ -5904,12 +5894,12 @@
                 const auto generator = [&] {
                     if constexpr (kind == DelByKind::Normal) {
                         return Box<JITDelByIdGenerator>::create(
-                            jit.codeBlock(), nodeSemanticOrigin, callSiteIndex,
+                            jit.codeBlock(), node->origin.semantic, callSiteIndex,
                             params.unavailableRegisters(), subscriptValue, base,
                             JSValueRegs(returnGPR), params.gpScratch(0));
                     } else {
                         return Box<JITDelByValGenerator>::create(
-                            jit.codeBlock(), nodeSemanticOrigin, callSiteIndex,
+                            jit.codeBlock(), node->origin.semantic, callSiteIndex,
                             params.unavailableRegisters(), base,
                             subscript, JSValueRegs(returnGPR), params.gpScratch(0));
                     }
@@ -5926,11 +5916,11 @@
                         slowCases.link(&jit);
                         CCallHelpers::Label slowPathBegin = jit.label();
                         CCallHelpers::Call slowPathCall = callOperation(
-                            *state, params.unavailableRegisters(), jit, nodeSemanticOrigin,
+                            *state, params.unavailableRegisters(), jit, node->origin.semantic,
                             exceptions.get(), optimizationFunction, returnGPR,
-                            jit.codeBlock()->globalObjectFor(nodeSemanticOrigin),
+                            jit.codeBlock()->globalObjectFor(node->origin.semantic),
                             CCallHelpers::TrustedImmPtr(generator->stubInfo()), base,
-                            subscript, CCallHelpers::TrustedImm32(ecmaMode)).call();
+                            subscript, CCallHelpers::TrustedImm32(node->ecmaMode().value())).call();
                         jit.jump().linkTo(done, &jit);
 
                         generator->reportSlowPathCall(slowPathBegin, slowPathCall);
@@ -9786,8 +9776,6 @@
         CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
         State* state = &m_ftlState;
         VM* vm = &this->vm();
-        CodeOrigin nodeSemanticOrigin = node->origin.semantic;
-        auto nodeOp = node->op();
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -9799,9 +9787,9 @@
                     CCallHelpers::TrustedImm32(callSiteIndex.bits()),
                     CCallHelpers::tagFor(VirtualRegister(CallFrameSlot::argumentCountIncludingThis)));
 
-                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(nodeSemanticOrigin);
+                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
                 callLinkInfo->setUpCall(
-                    nodeOp == Construct ? CallLinkInfo::Construct : CallLinkInfo::Call, GPRInfo::regT0);
+                    node->op() == Construct ? CallLinkInfo::Construct : CallLinkInfo::Call, GPRInfo::regT0);
 
                 auto slowPath = callLinkInfo->emitFastPath(jit, GPRInfo::regT0, InvalidGPRReg, CallLinkInfo::UseDataIC::No);
                 CCallHelpers::Jump done = jit.jump();
@@ -9808,7 +9796,7 @@
 
                 slowPath.link(&jit);
                 auto slowPathStart = jit.label();
-                jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(nodeSemanticOrigin)), GPRInfo::regT3);
+                jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(node->origin.semantic)), GPRInfo::regT3);
                 callLinkInfo->emitSlowPath(*vm, jit);
 
                 done.link(&jit);
@@ -9898,7 +9886,6 @@
         }
         
         CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         State* state = &m_ftlState;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
@@ -9932,7 +9919,7 @@
                     shuffleData.numPassedArgs = numPassedArgs;
                     shuffleData.setupCalleeSaveRegisters(jit.codeBlock());
                     
-                    CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(semanticNodeOrigin);
+                    CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
                     callLinkInfo->setUpCall(CallLinkInfo::DirectTailCall, InvalidGPRReg);
                     
                     CCallHelpers::Label mainPath = jit.label();
@@ -9949,7 +9936,7 @@
                     CCallHelpers::Label slowPath = jit.label();
                     callOperation(
                         *state, toSave, jit,
-                        semanticNodeOrigin, exceptions.get(), operationLinkDirectCall,
+                        node->origin.semantic, exceptions.get(), operationLinkDirectCall,
                         InvalidGPRReg, CCallHelpers::TrustedImmPtr(callLinkInfo), calleeGPR).call();
                     jit.jump().linkTo(mainPath, &jit);
                     callLinkInfo->setExecutableDuringCompilation(executable);
@@ -9964,7 +9951,7 @@
                     return;
                 }
                 
-                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(semanticNodeOrigin);
+                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
                 callLinkInfo->setUpCall(
                     isConstruct ? CallLinkInfo::DirectConstruct : CallLinkInfo::DirectCall, InvalidGPRReg);
 
@@ -9991,7 +9978,7 @@
                         
                         callOperation(
                             *state, params.unavailableRegisters(), jit,
-                            semanticNodeOrigin, exceptions.get(), operationLinkDirectCall,
+                            node->origin.semantic, exceptions.get(), operationLinkDirectCall,
                             InvalidGPRReg, CCallHelpers::TrustedImmPtr(callLinkInfo),
                             calleeGPR).call();
                         jit.jump().linkTo(mainPath, &jit);
@@ -10067,7 +10054,6 @@
         CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
         State* state = &m_ftlState;
         VM* vm = &this->vm();
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -10105,7 +10091,7 @@
                 slowPathShuffler.setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
                 slowPathShuffler.prepareForSlowPath();
 
-                jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(semanticNodeOrigin)), GPRInfo::regT3);
+                jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(node->origin.semantic)), GPRInfo::regT3);
                 callLinkInfo->emitSlowPath(*vm, jit);
 
                 auto doneLocation = jit.label();
@@ -10119,60 +10105,7 @@
                     });
             });
     }
-
-    struct CapturedForPhantomNewArrayWithSpreadCase {
-        unsigned parameterOffset;
-    };
-    struct CapturedForPhantomNewArrayBufferCase {
-        int64_t value;
-        int32_t currentStoreOffset;
-    };
-    struct CapturedForPhantomNewArrayBufferEnd {
-        unsigned arrayLength;
-    };
-    struct CapturedForPhantomCreateRest {
-        InlineCallFrame* inlineCallFrame;
-        unsigned numberOfArgumentsToSkip;
-        unsigned parameterOffset;
-    };
-    struct VarargsSpreadArgumentToEmit {
-        enum Type {
-            PhantomNewArrayWithSpreadCase,
-            PhantomNewArrayBufferCase,
-            PhantomNewArrayBufferEnd,
-            PhantomCreateRest
-        } m_type;
-        union {
-            CapturedForPhantomNewArrayWithSpreadCase m_phantomNewArrayWithSpreadCase;
-            CapturedForPhantomNewArrayBufferCase m_phantomNewArrayBufferCase;
-            CapturedForPhantomNewArrayBufferEnd m_phantomNewArrayBufferEnd;
-            CapturedForPhantomCreateRest m_phantomCreateRest;
-        };
-
-        VarargsSpreadArgumentToEmit(VarargsSpreadArgumentToEmit::Type t, unsigned arg)
-            : m_type(t)
-        {
-            if (m_type == PhantomNewArrayWithSpreadCase)
-                m_phantomNewArrayWithSpreadCase = { arg };
-            else {
-                ASSERT(t == PhantomNewArrayBufferEnd);
-                m_phantomNewArrayBufferEnd = { arg };
-            }
-        }
-        VarargsSpreadArgumentToEmit(VarargsSpreadArgumentToEmit::Type t, int64_t value, int32_t currentStoreOffset)
-            : m_type(t)
-            , m_phantomNewArrayBufferCase({ value, currentStoreOffset })
-        {
-            ASSERT(t == PhantomNewArrayBufferCase);
-        }
-        VarargsSpreadArgumentToEmit(VarargsSpreadArgumentToEmit::Type t, InlineCallFrame* inlineCallFrame, unsigned numberOfArgumentsToSkip, unsigned parameterOffset)
-            : m_type(t)
-            , m_phantomCreateRest({ inlineCallFrame, numberOfArgumentsToSkip, parameterOffset })
-        {
-            ASSERT(t == PhantomCreateRest);
-        }
-    };
-
+    
     void compileCallOrConstructVarargsSpread()
     {
         Node* node = m_node;
@@ -10187,20 +10120,13 @@
         Vector<LValue, 2> spreadLengths;
         Vector<LValue, 8> patchpointArguments;
         HashMap<InlineCallFrame*, LValue, WTF::DefaultHash<InlineCallFrame*>, WTF::NullableHashTraits<InlineCallFrame*>> cachedSpreadLengths;
-        // Because the patchpoint generator runs late in Air, the dfg graph will be long gone.
-        // So we must load everything relevant right now, and make sure that they are captured by value by the lambda that acts as the generator
-        // One particularly tricky point is that the generator would like to walk over the tree rooted at this node, exploring through PhantomNewArrayWithSpread and PhantomNewArrayBuffer, emitting code along the way.
-        // Instead, we do that walk here, and record just enough information in the following vector to emit the right code at the end of Air.
-        Vector<VarargsSpreadArgumentToEmit> argumentsToEmitFromRightToLeft;
-        int storeOffset = CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register));
-        unsigned paramsOffset = 4;
-        unsigned index = 0;
         auto pushAndCountArgumentsFromRightToLeft = recursableLambda([&](auto self, Node* target) -> void {
-            switch (target->op()) {
-            case PhantomSpread:
+            if (target->op() == PhantomSpread) {
                 self(target->child1().node());
                 return;
-            case PhantomNewArrayWithSpread: {
+            }
+
+            if (target->op() == PhantomNewArrayWithSpread) {
                 BitVector* bitVector = target->bitVector();
                 for (unsigned i = target->numChildren(); i--; ) {
                     if (bitVector->get(i))
@@ -10209,45 +10135,27 @@
                         ++staticArgumentCount;
                         LValue argument = this->lowJSValue(m_graph.varArgChild(target, i));
                         patchpointArguments.append(argument);
-                        argumentsToEmitFromRightToLeft.append({ VarargsSpreadArgumentToEmit::Type::PhantomNewArrayWithSpreadCase, paramsOffset + (index++)});
                     }
                 }
                 return;
             }
-            case PhantomNewArrayBuffer: {
-                auto* array = target->castOperand<JSImmutableButterfly*>();
-                unsigned arrayLength = array->length();
-                staticArgumentCount += arrayLength;
-                Checked<int32_t> offsetCount { 1 };
-                for (unsigned i = arrayLength; i--; ++offsetCount) {
-                    Checked<int32_t> currentStoreOffset { storeOffset };
-                    currentStoreOffset -= (offsetCount * static_cast<int32_t>(sizeof(Register)));
-                    // Because varargs values are drained as JSValue, we should not generate value
-                    // in Double form even if PhantomNewArrayBuffer's indexingType is ArrayWithDouble.
-                    int64_t value = JSValue::encode(array->get(i));
-                    argumentsToEmitFromRightToLeft.append({ VarargsSpreadArgumentToEmit::Type::PhantomNewArrayBufferCase, value, currentStoreOffset.value() });
-                }
-                argumentsToEmitFromRightToLeft.append({ VarargsSpreadArgumentToEmit::Type::PhantomNewArrayBufferEnd, arrayLength });
+
+            if (target->op() == PhantomNewArrayBuffer) {
+                staticArgumentCount += target->castOperand<JSImmutableButterfly*>()->length();
                 return;
             }
-            case PhantomCreateRest: {
-                InlineCallFrame* inlineCallFrame = target->origin.semantic.inlineCallFrame();
-                unsigned numberOfArgumentsToSkip = target->numberOfArgumentsToSkip();
-                unsigned parameterOffset = paramsOffset + (index++);
-                LValue length = cachedSpreadLengths.ensure(inlineCallFrame, [&] () {
-                    return m_out.zeroExtPtr(this->getSpreadLengthFromInlineCallFrame(inlineCallFrame, numberOfArgumentsToSkip));
-                }).iterator->value;
-                patchpointArguments.append(length);
-                spreadLengths.append(length);
-                argumentsToEmitFromRightToLeft.append({ VarargsSpreadArgumentToEmit::Type::PhantomCreateRest, inlineCallFrame, numberOfArgumentsToSkip, parameterOffset });
-                return;
-            }
-            default:
-                RELEASE_ASSERT_NOT_REACHED();
-            }
+
+            RELEASE_ASSERT(target->op() == PhantomCreateRest);
+            InlineCallFrame* inlineCallFrame = target->origin.semantic.inlineCallFrame();
+            unsigned numberOfArgumentsToSkip = target->numberOfArgumentsToSkip();
+            LValue length = cachedSpreadLengths.ensure(inlineCallFrame, [&] () {
+                return m_out.zeroExtPtr(this->getSpreadLengthFromInlineCallFrame(inlineCallFrame, numberOfArgumentsToSkip));
+            }).iterator->value;
+            patchpointArguments.append(length);
+            spreadLengths.append(length);
         });
+
         pushAndCountArgumentsFromRightToLeft(arguments);
-
         LValue argumentCountIncludingThis = m_out.constIntPtr(staticArgumentCount + 1);
         for (LValue length : spreadLengths)
             argumentCountIncludingThis = m_out.add(length, argumentCountIncludingThis);
@@ -10275,14 +10183,12 @@
             WTF::roundUpToMultipleOf(stackAlignmentBytes(), 5 * sizeof(EncodedJSValue));
 
         m_proc.requestCallArgAreaSizeInBytes(minimumJSCallAreaSize);
-
+        
         CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
         State* state = &m_ftlState;
         VM* vm = &this->vm();
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
-        auto nodeOp = node->op();
         patchpoint->setGenerator(
-            [=, argumentsToEmit = WTFMove(argumentsToEmitFromRightToLeft)] (CCallHelpers& jit, const StackmapGenerationParams& params) {
+            [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
                 CallSiteIndex callSiteIndex =
                     state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(codeOrigin);
@@ -10296,7 +10202,7 @@
                     CCallHelpers::TrustedImm32(callSiteIndex.bits()),
                     CCallHelpers::tagFor(VirtualRegister(CallFrameSlot::argumentCountIncludingThis)));
 
-                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(semanticNodeOrigin);
+                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
 
                 RegisterSet usedRegisters = RegisterSet::allRegisters();
                 usedRegisters.exclude(RegisterSet::volatileRegistersForJSCall());
@@ -10360,54 +10266,74 @@
 
                     jit.store32(scratchGPR2, CCallHelpers::Address(scratchGPR1, CallFrameSlot::argumentCountIncludingThis * static_cast<int>(sizeof(Register)) + PayloadOffset));
 
-                    for (const auto& argumentToEmit : argumentsToEmit) {
-                        switch (argumentToEmit.m_type) {
-                        case VarargsSpreadArgumentToEmit::PhantomNewArrayWithSpreadCase: {
-                            unsigned parameterOffset = argumentToEmit.m_phantomNewArrayWithSpreadCase.parameterOffset;
-                            jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR2);
-                            getValueFromRep(params[parameterOffset], scratchGPR3);
-                            jit.store64(scratchGPR3, CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, storeOffset));
-                            continue;
+                    int storeOffset = CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register));
+
+                    unsigned paramsOffset = 4;
+                    unsigned index = 0;
+                    auto emitArgumentsFromRightToLeft = recursableLambda([&](auto self, Node* target) -> void {
+                        if (target->op() == PhantomSpread) {
+                            self(target->child1().node());
+                            return;
                         }
-                        case VarargsSpreadArgumentToEmit::PhantomNewArrayBufferCase: {
-                            int64_t value = argumentToEmit.m_phantomNewArrayBufferCase.value;
-                            int32_t currentStoreOffset = argumentToEmit.m_phantomNewArrayBufferCase.currentStoreOffset;
-                            jit.move(CCallHelpers::TrustedImm64(value), scratchGPR3);
-                            jit.store64(scratchGPR3, CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, currentStoreOffset));
-                            continue;
+
+                        if (target->op() == PhantomNewArrayWithSpread) {
+                            BitVector* bitVector = target->bitVector();
+                            for (unsigned i = target->numChildren(); i--; ) {
+                                if (bitVector->get(i))
+                                    self(state->graph.varArgChild(target, i).node());
+                                else {
+                                    jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR2);
+                                    getValueFromRep(params[paramsOffset + (index++)], scratchGPR3);
+                                    jit.store64(scratchGPR3,
+                                        CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, storeOffset));
+                                }
+                            }
+                            return;
                         }
-                        case VarargsSpreadArgumentToEmit::PhantomNewArrayBufferEnd: {
-                            size_t arrayLength = static_cast<size_t>(argumentToEmit.m_phantomNewArrayBufferEnd.arrayLength);
-                            jit.subPtr(CCallHelpers::TrustedImmPtr(arrayLength), scratchGPR2);
-                            continue;
+
+                        if (target->op() == PhantomNewArrayBuffer) {
+                            auto* array = target->castOperand<JSImmutableButterfly*>();
+                            Checked<int32_t> offsetCount { 1 };
+                            for (unsigned i = array->length(); i--; ++offsetCount) {
+                                // Because varargs values are drained as JSValue, we should not generate value
+                                // in Double form even if PhantomNewArrayBuffer's indexingType is ArrayWithDouble.
+                                int64_t value = JSValue::encode(array->get(i));
+                                jit.move(CCallHelpers::TrustedImm64(value), scratchGPR3);
+                                Checked<int32_t> currentStoreOffset { storeOffset };
+                                currentStoreOffset -= (offsetCount * static_cast<int32_t>(sizeof(Register)));
+                                jit.store64(scratchGPR3,
+                                    CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, currentStoreOffset));
+                            }
+                            jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(array->length())), scratchGPR2);
+                            return;
                         }
-                        case VarargsSpreadArgumentToEmit::PhantomCreateRest: {
-                            InlineCallFrame* inlineCallFrame = argumentToEmit.m_phantomCreateRest.inlineCallFrame;
-                            unsigned numberOfArgumentsToSkip = argumentToEmit.m_phantomCreateRest.numberOfArgumentsToSkip;
-                            unsigned parameterOffset = argumentToEmit.m_phantomCreateRest.parameterOffset;
 
-                            B3::ValueRep numArgumentsToCopy = params[parameterOffset];
-                            getValueFromRep(numArgumentsToCopy, scratchGPR3);
-                            int loadOffset = (AssemblyHelpers::argumentsStart(inlineCallFrame).offset() + numberOfArgumentsToSkip) * static_cast<int>(sizeof(Register));
+                        RELEASE_ASSERT(target->op() == PhantomCreateRest);
+                        InlineCallFrame* inlineCallFrame = target->origin.semantic.inlineCallFrame();
 
-                            auto done = jit.branchTestPtr(MacroAssembler::Zero, scratchGPR3);
-                            auto loopStart = jit.label();
-                            jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR3);
-                            jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR2);
-                            jit.load64(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, scratchGPR3, CCallHelpers::TimesEight, loadOffset), scratchGPR4);
-                            jit.store64(scratchGPR4,
-                                CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, storeOffset));
-                            jit.branchTestPtr(CCallHelpers::NonZero, scratchGPR3).linkTo(loopStart, &jit);
-                            done.link(&jit);
-                        }
-                        }
-                    }
+                        unsigned numberOfArgumentsToSkip = target->numberOfArgumentsToSkip();
+
+                        B3::ValueRep numArgumentsToCopy = params[paramsOffset + (index++)];
+                        getValueFromRep(numArgumentsToCopy, scratchGPR3);
+                        int loadOffset = (AssemblyHelpers::argumentsStart(inlineCallFrame).offset() + numberOfArgumentsToSkip) * static_cast<int>(sizeof(Register));
+
+                        auto done = jit.branchTestPtr(MacroAssembler::Zero, scratchGPR3);
+                        auto loopStart = jit.label();
+                        jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR3);
+                        jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR2);
+                        jit.load64(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, scratchGPR3, CCallHelpers::TimesEight, loadOffset), scratchGPR4);
+                        jit.store64(scratchGPR4,
+                            CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, storeOffset));
+                        jit.branchTestPtr(CCallHelpers::NonZero, scratchGPR3).linkTo(loopStart, &jit);
+                        done.link(&jit);
+                    });
+                    emitArgumentsFromRightToLeft(arguments);
                 }
 
                 {
                     CCallHelpers::Jump dontThrow = jit.jump();
                     slowCase.link(&jit);
-                    jit.setupArguments<decltype(operationThrowStackOverflowForVarargs)>(jit.codeBlock()->globalObjectFor(semanticNodeOrigin));
+                    jit.setupArguments<decltype(operationThrowStackOverflowForVarargs)>(jit.codeBlock()->globalObjectFor(node->origin.semantic));
                     jit.prepareCallOperation(jit.vm());
                     callWithExceptionCheck(operationThrowStackOverflowForVarargs);
                     jit.abortWithReason(DFGVarargsThrowingPathDidNotThrow);
@@ -10421,9 +10347,9 @@
                 jit.store64(scratchGPR3, CCallHelpers::calleeArgumentSlot(0));
                 
                 CallLinkInfo::CallType callType;
-                if (nodeOp == ConstructVarargs || nodeOp == ConstructForwardVarargs)
+                if (node->op() == ConstructVarargs || node->op() == ConstructForwardVarargs)
                     callType = CallLinkInfo::ConstructVarargs;
-                else if (nodeOp == TailCallVarargs || nodeOp == TailCallForwardVarargs)
+                else if (node->op() == TailCallVarargs || node->op() == TailCallForwardVarargs)
                     callType = CallLinkInfo::TailCallVarargs;
                 else
                     callType = CallLinkInfo::CallVarargs;
@@ -10451,7 +10377,7 @@
 
                 if (isTailCall)
                     jit.emitRestoreCalleeSaves();
-                jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(semanticNodeOrigin)), GPRInfo::regT3);
+                jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(node->origin.semantic)), GPRInfo::regT3);
                 callLinkInfo->emitSlowPath(*vm, jit);
                 
                 if (isTailCall)
@@ -10557,14 +10483,6 @@
         CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
         State* state = &m_ftlState;
         VM* vm = &this->vm();
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
-        InlineCallFrame* inlineCallFrame;
-        if (node->child3())
-            inlineCallFrame = node->child3()->origin.semantic.inlineCallFrame();
-        else
-            inlineCallFrame = semanticNodeOrigin.inlineCallFrame();
-        CallVarargsData* data = node->callVarargsData();
-        auto nodeOp = node->op();
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -10580,7 +10498,8 @@
                     CCallHelpers::TrustedImm32(callSiteIndex.bits()),
                     CCallHelpers::tagFor(VirtualRegister(CallFrameSlot::argumentCountIncludingThis)));
 
-                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(semanticNodeOrigin);
+                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
+                CallVarargsData* data = node->callVarargsData();
 
                 unsigned argIndex = 1;
                 GPRReg calleeGPR = params[argIndex++].gpr();
@@ -10664,6 +10583,11 @@
                     jit.move(CCallHelpers::TrustedImm32(originalStackHeight / sizeof(EncodedJSValue)), scratchGPR2);
                     
                     CCallHelpers::JumpList slowCase;
+                    InlineCallFrame* inlineCallFrame;
+                    if (node->child3())
+                        inlineCallFrame = node->child3()->origin.semantic.inlineCallFrame();
+                    else
+                        inlineCallFrame = node->origin.semantic.inlineCallFrame();
 
                     // emitSetupVarargsFrameFastCase modifies the stack pointer if it succeeds.
                     emitSetupVarargsFrameFastCase(*vm, jit, scratchGPR2, scratchGPR1, scratchGPR2, scratchGPR3, inlineCallFrame, data->firstVarArgOffset, slowCase);
@@ -10670,7 +10594,7 @@
 
                     CCallHelpers::Jump done = jit.jump();
                     slowCase.link(&jit);
-                    jit.setupArguments<decltype(operationThrowStackOverflowForVarargs)>(jit.codeBlock()->globalObjectFor(semanticNodeOrigin));
+                    jit.setupArguments<decltype(operationThrowStackOverflowForVarargs)>(jit.codeBlock()->globalObjectFor(node->origin.semantic));
                     jit.prepareCallOperation(jit.vm());
                     callWithExceptionCheck(bitwise_cast<void(*)()>(operationThrowStackOverflowForVarargs));
                     jit.abortWithReason(DFGVarargsThrowingPathDidNotThrow);
@@ -10678,7 +10602,7 @@
                     done.link(&jit);
                 } else {
                     jit.move(CCallHelpers::TrustedImm32(originalStackHeight / sizeof(EncodedJSValue)), scratchGPR1);
-                    jit.setupArguments<decltype(operationSizeFrameForVarargs)>(jit.codeBlock()->globalObjectFor(semanticNodeOrigin), argumentsGPR, scratchGPR1, CCallHelpers::TrustedImm32(data->firstVarArgOffset));
+                    jit.setupArguments<decltype(operationSizeFrameForVarargs)>(jit.codeBlock()->globalObjectFor(node->origin.semantic), argumentsGPR, scratchGPR1, CCallHelpers::TrustedImm32(data->firstVarArgOffset));
                     jit.prepareCallOperation(jit.vm());
                     callWithExceptionCheck(bitwise_cast<void(*)()>(operationSizeFrameForVarargs));
 
@@ -10687,7 +10611,7 @@
                     argumentsLateRep.emitRestore(jit, argumentsGPR);
                     emitSetVarargsFrame(jit, scratchGPR1, false, scratchGPR2, scratchGPR2);
                     jit.addPtr(CCallHelpers::TrustedImm32(-minimumJSCallAreaSize), scratchGPR2, CCallHelpers::stackPointerRegister);
-                    jit.setupArguments<decltype(operationSetupVarargsFrame)>(jit.codeBlock()->globalObjectFor(semanticNodeOrigin), scratchGPR2, argumentsGPR, CCallHelpers::TrustedImm32(data->firstVarArgOffset), scratchGPR1);
+                    jit.setupArguments<decltype(operationSetupVarargsFrame)>(jit.codeBlock()->globalObjectFor(node->origin.semantic), scratchGPR2, argumentsGPR, CCallHelpers::TrustedImm32(data->firstVarArgOffset), scratchGPR1);
                     jit.prepareCallOperation(jit.vm());
                     callWithExceptionCheck(bitwise_cast<void(*)()>(operationSetupVarargsFrame));
                     
@@ -10704,9 +10628,9 @@
                 jit.store64(thisGPR, CCallHelpers::calleeArgumentSlot(0));
                 
                 CallLinkInfo::CallType callType;
-                if (nodeOp == ConstructVarargs || nodeOp == ConstructForwardVarargs)
+                if (node->op() == ConstructVarargs || node->op() == ConstructForwardVarargs)
                     callType = CallLinkInfo::ConstructVarargs;
-                else if (nodeOp == TailCallVarargs || nodeOp == TailCallForwardVarargs)
+                else if (node->op() == TailCallVarargs || node->op() == TailCallForwardVarargs)
                     callType = CallLinkInfo::TailCallVarargs;
                 else
                     callType = CallLinkInfo::CallVarargs;
@@ -10732,7 +10656,7 @@
 
                 if (isTailCall)
                     jit.emitRestoreCalleeSaves();
-                jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(semanticNodeOrigin)), GPRInfo::regT3);
+                jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(node->origin.semantic)), GPRInfo::regT3);
                 callLinkInfo->emitSlowPath(*vm, jit);
                 
                 if (isTailCall)
@@ -10806,8 +10730,6 @@
         CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
         State* state = &m_ftlState;
         VM& vm = this->vm();
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
-        auto ecmaMode = node->ecmaMode().value();
         JSGlobalObject* globalObject = m_graph.globalObjectFor(m_origin.semantic);
         patchpoint->setGenerator(
             [=, &vm] (CCallHelpers& jit, const StackmapGenerationParams& params) {
@@ -10822,7 +10744,7 @@
                     CCallHelpers::TrustedImm32(callSiteIndex.bits()),
                     CCallHelpers::tagFor(VirtualRegister(CallFrameSlot::argumentCountIncludingThis)));
                 
-                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(semanticNodeOrigin);
+                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
                 callLinkInfo->setUpCall(CallLinkInfo::Call, GPRInfo::regT0);
                 
                 jit.addPtr(CCallHelpers::TrustedImm32(-static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC))), CCallHelpers::stackPointerRegister, GPRInfo::regT1);
@@ -10834,7 +10756,7 @@
                 unsigned requiredBytes = sizeof(CallerFrameAndPC) + sizeof(CallFrame*) * 2;
                 requiredBytes = WTF::roundUpToMultipleOf(stackAlignmentBytes(), requiredBytes);
                 jit.subPtr(CCallHelpers::TrustedImm32(requiredBytes), CCallHelpers::stackPointerRegister);
-                jit.move(CCallHelpers::TrustedImm32(ecmaMode), GPRInfo::regT2);
+                jit.move(CCallHelpers::TrustedImm32(node->ecmaMode().value()), GPRInfo::regT2);
                 jit.setupArguments<decltype(operationCallEval)>(globalObject, GPRInfo::regT1, GPRInfo::regT2);
                 jit.prepareCallOperation(vm);
                 jit.move(CCallHelpers::TrustedImmPtr(tagCFunction<OperationPtrTag>(operationCallEval)), GPRInfo::nonPreservedNonArgumentGPR0);
@@ -12383,12 +12305,12 @@
         RefPtr<PatchpointExceptionHandle> exceptionHandle = preparePatchpointForExceptions(patchpoint);
 
         State* state = &m_ftlState;
-        CodeOrigin semanticNodeOrigin = m_node->origin.semantic;
+        Node* node = m_node;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
 
-                CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(semanticNodeOrigin);
+                CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
 
                 // This is the direct exit target for operation calls.
                 Box<CCallHelpers::JumpList> exceptions = exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -12406,12 +12328,12 @@
                 const auto generator = [&] {
                     if constexpr (kind == InByKind::Normal) {
                         return Box<JITInByIdGenerator>::create(
-                            jit.codeBlock(), semanticNodeOrigin, callSiteIndex,
+                            jit.codeBlock(), node->origin.semantic, callSiteIndex,
                             params.unavailableRegisters(), subscriptValue, base,
                             JSValueRegs(returnGPR));
                     } else {
                         return Box<JITInByValGenerator>::create(
-                            jit.codeBlock(), semanticNodeOrigin, callSiteIndex,
+                            jit.codeBlock(), node->origin.semantic, callSiteIndex,
                             params.unavailableRegisters(), base, subscript,
                             JSValueRegs(returnGPR));
                     }
@@ -12429,16 +12351,16 @@
                         CCallHelpers::Call slowPathCall;
                         if constexpr (kind == InByKind::Normal) {
                             slowPathCall = callOperation(
-                                *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+                                *state, params.unavailableRegisters(), jit, node->origin.semantic,
                                 exceptions.get(), operationInByIdOptimize, returnGPR,
-                                jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+                                jit.codeBlock()->globalObjectFor(node->origin.semantic),
                                 CCallHelpers::TrustedImmPtr(generator->stubInfo()),
                                 base, subscript).call();
                         } else {
                             slowPathCall = callOperation(
-                                *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+                                *state, params.unavailableRegisters(), jit, node->origin.semantic,
                                 exceptions.get(), operationInByValOptimize, returnGPR,
-                                jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+                                jit.codeBlock()->globalObjectFor(node->origin.semantic),
                                 CCallHelpers::TrustedImmPtr(generator->stubInfo()),
                                 CCallHelpers::TrustedImmPtr(nullptr), base, subscript).call();
                         }
@@ -12665,7 +12587,6 @@
         RefPtr<PatchpointExceptionHandle> exceptionHandle =
             preparePatchpointForExceptions(patchpoint);
 
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -12689,7 +12610,7 @@
                     slowCases.append(jit.branchIfNotCell(prototypeGPR));
                 
                 CallSiteIndex callSiteIndex =
-                    state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(semanticNodeOrigin);
+                    state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
                 
                 // This is the direct exit target for operation calls.
                 Box<CCallHelpers::JumpList> exceptions =
@@ -12696,7 +12617,7 @@
                     exceptionHandle->scheduleExitCreation(params)->jumps(jit);
                 
                 auto generator = Box<JITInstanceOfGenerator>::create(
-                    jit.codeBlock(), semanticNodeOrigin, callSiteIndex,
+                    jit.codeBlock(), node->origin.semantic, callSiteIndex,
                     params.unavailableRegisters(), resultGPR, valueGPR, prototypeGPR, scratchGPR,
                     scratch2GPR, prototypeIsObject);
                 generator->generateFastPath(jit);
@@ -12711,9 +12632,9 @@
                         slowCases.link(&jit);
                         CCallHelpers::Label slowPathBegin = jit.label();
                         CCallHelpers::Call slowPathCall = callOperation(
-                            *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+                            *state, params.unavailableRegisters(), jit, node->origin.semantic,
                             exceptions.get(), optimizationFunction, resultGPR,
-                            jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+                            jit.codeBlock()->globalObjectFor(node->origin.semantic),
                             CCallHelpers::TrustedImmPtr(generator->stubInfo()), valueGPR,
                             prototypeGPR).call();
                         jit.jump().linkTo(done, &jit);
@@ -14171,13 +14092,12 @@
             preparePatchpointForExceptions(patchpoint);
 
         State* state = &m_ftlState;
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
 
                 CallSiteIndex callSiteIndex =
-                    state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(semanticNodeOrigin);
+                    state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
 
                 // This is the direct exit target for operation calls.
                 Box<CCallHelpers::JumpList> exceptions =
@@ -14189,7 +14109,7 @@
                 exceptionHandle->scheduleExitCreationForUnwind(params, callSiteIndex);
 
                 auto generator = Box<JITGetByIdGenerator>::create(
-                    jit.codeBlock(), semanticNodeOrigin, callSiteIndex,
+                    jit.codeBlock(), node->origin.semantic, callSiteIndex,
                     params.unavailableRegisters(), identifier, JSValueRegs(params[1].gpr()),
                     JSValueRegs(params[0].gpr()), type);
 
@@ -14205,9 +14125,9 @@
                         generator->slowPathJump().link(&jit);
                         CCallHelpers::Label slowPathBegin = jit.label();
                         CCallHelpers::Call slowPathCall = callOperation(
-                            *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+                            *state, params.unavailableRegisters(), jit, node->origin.semantic,
                             exceptions.get(), optimizationFunction, params[0].gpr(),
-                            jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+                            jit.codeBlock()->globalObjectFor(node->origin.semantic),
                             CCallHelpers::TrustedImmPtr(generator->stubInfo()), params[1].gpr(),
                             CCallHelpers::TrustedImmPtr(identifier.rawBits())).call();
                         jit.jump().linkTo(done, &jit);
@@ -14241,13 +14161,12 @@
             preparePatchpointForExceptions(patchpoint);
 
         State* state = &m_ftlState;
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
 
                 CallSiteIndex callSiteIndex =
-                    state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(semanticNodeOrigin);
+                    state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
 
                 // This is the direct exit target for operation calls.
                 Box<CCallHelpers::JumpList> exceptions =
@@ -14259,7 +14178,7 @@
                 exceptionHandle->scheduleExitCreationForUnwind(params, callSiteIndex);
 
                 auto generator = Box<JITGetByIdWithThisGenerator>::create(
-                    jit.codeBlock(), semanticNodeOrigin, callSiteIndex,
+                    jit.codeBlock(), node->origin.semantic, callSiteIndex,
                     params.unavailableRegisters(), identifier, JSValueRegs(params[0].gpr()),
                     JSValueRegs(params[1].gpr()), JSValueRegs(params[2].gpr()));
 
@@ -14275,9 +14194,9 @@
                         generator->slowPathJump().link(&jit);
                         CCallHelpers::Label slowPathBegin = jit.label();
                         CCallHelpers::Call slowPathCall = callOperation(
-                            *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+                            *state, params.unavailableRegisters(), jit, node->origin.semantic,
                             exceptions.get(), optimizationFunction, params[0].gpr(),
-                            jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+                            jit.codeBlock()->globalObjectFor(node->origin.semantic),
                             CCallHelpers::TrustedImmPtr(generator->stubInfo()), params[1].gpr(),
                             params[2].gpr(), CCallHelpers::TrustedImmPtr(identifier.rawBits())).call();
                         jit.jump().linkTo(done, &jit);
@@ -14672,7 +14591,6 @@
         State* state = &m_ftlState;
         Node* node = m_node;
         NodeType op = m_node->op();
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         JSValue child1Constant = m_state.forNode(m_node->child1()).value();
 
         auto nodeIndex = m_nodeIndexInGraph;
@@ -14694,7 +14612,7 @@
 
                 RefPtr<OSRExitHandle> handle = exitDescriptor->emitOSRExitLater(*state, BadType, origin, params, nodeIndex, osrExitArgumentOffset);
 
-                SnippetParams domJITParams(*state, params, semanticNodeOrigin, nullptr, WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch));
+                SnippetParams domJITParams(*state, params, node, nullptr, WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch));
                 CCallHelpers::JumpList failureCases = domJIT->generator()->run(jit, domJITParams);
                 CCallHelpers::JumpList notJSCastFailureCases;
                 if (op == CheckNotJSCast) {
@@ -14814,7 +14732,6 @@
 
         State* state = &m_ftlState;
         Node* node = m_node;
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -14836,7 +14753,7 @@
 
                 Box<CCallHelpers::JumpList> exceptions = exceptionHandle->scheduleExitCreation(params)->jumps(jit);
 
-                SnippetParams domJITParams(*state, params, semanticNodeOrigin, exceptions, WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch));
+                SnippetParams domJITParams(*state, params, node, exceptions, WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch));
                 domJIT->generator()->run(jit, domJITParams);
             });
         patchpoint->effects = Effects::forCall();
@@ -15593,7 +15510,6 @@
         patchpoint->clobber(RegisterSet::macroScratchRegisters());
         patchpoint->resultConstraints = { ValueRep::SomeEarlyRegister };
         State* state = &m_ftlState;
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -15619,16 +15535,16 @@
                             
                             generator->slowPathJumpList().link(&jit);
                             callOperation(
-                                *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+                                *state, params.unavailableRegisters(), jit, node->origin.semantic,
                                 exceptions.get(), slowPathFunction, params[0].gpr(),
-                                jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+                                jit.codeBlock()->globalObjectFor(node->origin.semantic),
                                 params[1].gpr(), params[2].gpr());
                             jit.jump().linkTo(done, &jit);
                         });
                 } else {
                     callOperation(
-                        *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
-                        exceptions.get(), slowPathFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr(),
+                        *state, params.unavailableRegisters(), jit, node->origin.semantic,
+                        exceptions.get(), slowPathFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr(),
                         params[2].gpr());
                 }
             });
@@ -15661,7 +15577,6 @@
         patchpoint->clobber(RegisterSet::macroScratchRegisters());
         patchpoint->resultConstraints = { ValueRep::SomeEarlyRegister };
         State* state = &m_ftlState;
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -15683,9 +15598,9 @@
                             
                         generator->slowPathJumpList().link(&jit);
                         callOperation(
-                            *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+                            *state, params.unavailableRegisters(), jit, node->origin.semantic,
                             exceptions.get(), slowPathFunction, params[0].gpr(),
-                            jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+                            jit.codeBlock()->globalObjectFor(node->origin.semantic),
                             params[1].gpr(), params[2].gpr());
                         jit.jump().linkTo(done, &jit);
                     });
@@ -15719,7 +15634,6 @@
         patchpoint->clobber(RegisterSet::macroScratchRegisters());
         patchpoint->resultConstraints = { ValueRep::SomeEarlyRegister };
         State* state = &m_ftlState;
-        CodeOrigin semanticNodeOrigin = node->origin.semantic;
         patchpoint->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -15747,9 +15661,9 @@
                             ? operationValueBitRShift : operationValueBitURShift;
                         
                         callOperation(
-                            *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+                            *state, params.unavailableRegisters(), jit, node->origin.semantic,
                             exceptions.get(), slowPathFunction, params[0].gpr(),
-                            jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+                            jit.codeBlock()->globalObjectFor(node->origin.semantic),
                             params[1].gpr(), params[2].gpr());
                         jit.jump().linkTo(done, &jit);
                     });
@@ -20092,7 +20006,6 @@
         BlockIndex blockIndex = block->index;
         unsigned nodeIndex = node ? node->index() : UINT_MAX;
 #if !ASSERT_ENABLED
-        auto nodeOp = node ? node->op() : LastNodeType;
         m_out.patchpoint(Void)->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams&) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -20100,7 +20013,7 @@
                 jit.move(CCallHelpers::TrustedImm32(blockIndex), GPRInfo::regT0);
                 jit.move(CCallHelpers::TrustedImm32(nodeIndex), GPRInfo::regT1);
                 if (node)
-                    jit.move(CCallHelpers::TrustedImm32(nodeOp), GPRInfo::regT2);
+                    jit.move(CCallHelpers::TrustedImm32(node->op()), GPRInfo::regT2);
                 jit.abortWithReason(FTLCrash);
             });
 #else // ASSERT_ENABLED

Modified: trunk/Source/_javascript_Core/ftl/FTLSnippetParams.cpp (278586 => 278587)


--- trunk/Source/_javascript_Core/ftl/FTLSnippetParams.cpp	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/_javascript_Core/ftl/FTLSnippetParams.cpp	2021-06-08 01:29:31 UTC (rev 278587)
@@ -34,7 +34,7 @@
 namespace JSC { namespace FTL {
 
 template<typename OperationType, typename ResultType, typename Arguments, size_t... ArgumentsIndex>
-static void dispatch(CCallHelpers& jit, FTL::State* state, const B3::StackmapGenerationParams& params, CodeOrigin semanticNodeOrigin, Box<CCallHelpers::JumpList> exceptions, CCallHelpers::JumpList from, OperationType operation, ResultType result, Arguments arguments, std::index_sequence<ArgumentsIndex...>)
+static void dispatch(CCallHelpers& jit, FTL::State* state, const B3::StackmapGenerationParams& params, DFG::Node* node, Box<CCallHelpers::JumpList> exceptions, CCallHelpers::JumpList from, OperationType operation, ResultType result, Arguments arguments, std::index_sequence<ArgumentsIndex...>)
 {
     CCallHelpers::Label done = jit.label();
     params.addLatePath([=] (CCallHelpers& jit) {
@@ -42,7 +42,7 @@
 
         from.link(&jit);
         callOperation(
-            *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+            *state, params.unavailableRegisters(), jit, node->origin.semantic,
             exceptions.get(), operation, extractResult(result), std::get<ArgumentsIndex>(arguments)...);
         jit.jump().linkTo(done, &jit);
     });
@@ -51,7 +51,7 @@
 #define JSC_DEFINE_CALL_OPERATIONS(OperationType, ResultType, ...) \
     void SnippetParams::addSlowPathCallImpl(CCallHelpers::JumpList from, CCallHelpers& jit, OperationType operation, ResultType result, std::tuple<__VA_ARGS__> args) \
     { \
-        dispatch(jit, &m_state, m_params, m_semanticNodeOrigin, m_exceptions, from, operation, result, args, std::make_index_sequence<std::tuple_size<decltype(args)>::value>()); \
+        dispatch(jit, &m_state, m_params, m_node, m_exceptions, from, operation, result, args, std::make_index_sequence<std::tuple_size<decltype(args)>::value>()); \
     } \
 
 SNIPPET_SLOW_PATH_CALLS(JSC_DEFINE_CALL_OPERATIONS)

Modified: trunk/Source/_javascript_Core/ftl/FTLSnippetParams.h (278586 => 278587)


--- trunk/Source/_javascript_Core/ftl/FTLSnippetParams.h	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/_javascript_Core/ftl/FTLSnippetParams.h	2021-06-08 01:29:31 UTC (rev 278587)
@@ -38,11 +38,11 @@
 
 class SnippetParams final : public JSC::SnippetParams {
 public:
-    SnippetParams(State& state, const B3::StackmapGenerationParams& params, CodeOrigin semanticNodeOrigin, Box<CCallHelpers::JumpList> exceptions, Vector<Value>&& regs, Vector<GPRReg>&& gpScratch, Vector<FPRReg>&& fpScratch)
+    SnippetParams(State& state, const B3::StackmapGenerationParams& params, DFG::Node* node, Box<CCallHelpers::JumpList> exceptions, Vector<Value>&& regs, Vector<GPRReg>&& gpScratch, Vector<FPRReg>&& fpScratch)
         : JSC::SnippetParams(state.vm(), WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch))
         , m_state(state)
         , m_params(params)
-        , m_semanticNodeOrigin(semanticNodeOrigin)
+        , m_node(node)
         , m_exceptions(exceptions)
     {
     }
@@ -54,7 +54,7 @@
 
     State& m_state;
     const B3::StackmapGenerationParams& m_params;
-    CodeOrigin m_semanticNodeOrigin;
+    DFG::Node* m_node;
     Box<CCallHelpers::JumpList> m_exceptions;
 };
 

Modified: trunk/Source/_javascript_Core/ftl/FTLState.cpp (278586 => 278587)


--- trunk/Source/_javascript_Core/ftl/FTLState.cpp	2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/_javascript_Core/ftl/FTLState.cpp	2021-06-08 01:29:31 UTC (rev 278587)
@@ -63,9 +63,6 @@
 
     proc = makeUnique<Procedure>();
 
-    if (graph.m_vm.shouldBuilderPCToCodeOriginMapping())
-        proc->setNeedsPCToOriginMap();
-
     proc->setOriginPrinter(
         [] (PrintStream& out, B3::Origin origin) {
             out.print(bitwise_cast<Node*>(origin.data()));
_______________________________________________
webkit-changes mailing list
[email protected]
https://lists.webkit.org/mailman/listinfo/webkit-changes

Reply via email to