Diff
Modified: trunk/Source/_javascript_Core/ChangeLog (261609 => 261610)
--- trunk/Source/_javascript_Core/ChangeLog 2020-05-13 10:47:34 UTC (rev 261609)
+++ trunk/Source/_javascript_Core/ChangeLog 2020-05-13 12:16:36 UTC (rev 261610)
@@ -1,3 +1,39 @@
+2020-05-13 Caio Lima <[email protected]>
+
+ [JSC] Support delete by val/id IC on 32-bits
+ https://bugs.webkit.org/show_bug.cgi?id=208207
+
+ Reviewed by Saam Barati.
+
+ This patch implements DeleteById and DeleteByVal IC on the 32-bit JIT. It
+ includes both Baseline and DFG changes.
+
+ * dfg/DFGFixupPhase.cpp:
+ (JSC::DFG::FixupPhase::fixupNode):
+ * dfg/DFGSpeculativeJIT.cpp:
+ (JSC::DFG::SpeculativeJIT::compileDeleteById):
+ (JSC::DFG::SpeculativeJIT::compileDeleteByVal):
+ * dfg/DFGSpeculativeJIT32_64.cpp:
+ (JSC::DFG::SpeculativeJIT::compileDeleteById): Deleted.
+ (JSC::DFG::SpeculativeJIT::compileDeleteByVal): Deleted.
+ * dfg/DFGSpeculativeJIT64.cpp:
+ (JSC::DFG::SpeculativeJIT::compileDeleteById): Deleted.
+ (JSC::DFG::SpeculativeJIT::compileDeleteByVal): Deleted.
+ * ftl/FTLLowerDFGToB3.cpp:
+ (JSC::FTL::DFG::LowerDFGToB3::compileDelBy):
+ * jit/JITInlineCacheGenerator.cpp:
+ (JSC::JITDelByValGenerator::JITDelByValGenerator):
+ (JSC::JITDelByIdGenerator::JITDelByIdGenerator):
+ * jit/JITInlineCacheGenerator.h:
+ * jit/JITPropertyAccess.cpp:
+ (JSC::JIT::emit_op_del_by_id):
+ (JSC::JIT::emit_op_del_by_val):
+ * jit/JITPropertyAccess32_64.cpp:
+ (JSC::JIT::emit_op_del_by_id):
+ (JSC::JIT::emit_op_del_by_val):
+ (JSC::JIT::emitSlow_op_del_by_val):
+ (JSC::JIT::emitSlow_op_del_by_id):
+
2020-05-13 Saam Barati <[email protected]>
MovHint can see an arguments object be MovHinted to a Tmp
Modified: trunk/Source/_javascript_Core/dfg/DFGFixupPhase.cpp (261609 => 261610)
--- trunk/Source/_javascript_Core/dfg/DFGFixupPhase.cpp 2020-05-13 10:47:34 UTC (rev 261609)
+++ trunk/Source/_javascript_Core/dfg/DFGFixupPhase.cpp 2020-05-13 12:16:36 UTC (rev 261610)
@@ -1736,21 +1736,17 @@
}
case DeleteByVal: {
-#if USE(JSVALUE64)
if (node->child1()->shouldSpeculateCell()) {
fixEdge<CellUse>(node->child1());
if (node->child2()->shouldSpeculateCell())
fixEdge<CellUse>(node->child2());
}
-#endif
break;
}
case DeleteById: {
-#if USE(JSVALUE64)
if (node->child1()->shouldSpeculateCell())
fixEdge<CellUse>(node->child1());
-#endif
break;
}
Modified: trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.cpp (261609 => 261610)
--- trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.cpp 2020-05-13 10:47:34 UTC (rev 261609)
+++ trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.cpp 2020-05-13 12:16:36 UTC (rev 261610)
@@ -1107,6 +1107,132 @@
}
}
+void SpeculativeJIT::compileDeleteById(Node* node)
+{
+ if (node->child1().useKind() == CellUse) {
+ SpeculateCellOperand base(this, node->child1());
+ JSValueRegsTemporary result(this);
+ GPRTemporary scratch(this);
+
+ JITCompiler::JumpList slowCases;
+
+ JSValueRegs resultRegs = result.regs();
+
+ GPRReg baseGPR = base.gpr();
+ GPRReg scratchGPR = scratch.gpr();
+ GPRReg resultGPR = resultRegs.payloadGPR();
+
+ CodeOrigin codeOrigin = node->origin.semantic;
+ CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(codeOrigin, m_stream->size());
+ RegisterSet usedRegisters = this->usedRegisters();
+
+ JITDelByIdGenerator gen(
+ m_jit.codeBlock(), codeOrigin, callSite, usedRegisters, node->cacheableIdentifier(),
+ JSValueRegs::payloadOnly(baseGPR), resultRegs, scratchGPR);
+
+ gen.generateFastPath(m_jit);
+ slowCases.append(gen.slowPathJump());
+
+#if USE(JSVALUE64)
+ std::unique_ptr<SlowPathGenerator> slowPath = slowPathCall(
+ slowCases, this, operationDeleteByIdOptimize,
+ resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(codeOrigin)), gen.stubInfo(), JSValueRegs(baseGPR), node->cacheableIdentifier().rawBits(), TrustedImm32(node->ecmaMode().value()));
+#else
+ std::unique_ptr<SlowPathGenerator> slowPath = slowPathCall(
+ slowCases, this, operationDeleteByIdOptimize,
+ resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(codeOrigin)), gen.stubInfo(), CCallHelpers::CellValue(baseGPR), node->cacheableIdentifier().rawBits(), TrustedImm32(node->ecmaMode().value()));
+#endif
+
+ m_jit.addDelById(gen, slowPath.get());
+ addSlowPathGenerator(WTFMove(slowPath));
+
+ unblessedBooleanResult(resultGPR, node);
+ return;
+ }
+
+ // FIXME: We should use IC even if child1 is UntypedUse. In that case, we should emit write-barrier after the fast path of IC.
+ // https://bugs.webkit.org/show_bug.cgi?id=209397
+ ASSERT(node->child1().useKind() == UntypedUse);
+ JSValueOperand base(this, node->child1());
+
+ JSValueRegs baseRegs = base.jsValueRegs();
+
+ flushRegisters();
+ GPRFlushedCallResult result(this);
+ GPRReg resultGPR = result.gpr();
+ callOperation(operationDeleteByIdGeneric, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), nullptr, baseRegs, node->cacheableIdentifier().rawBits(), TrustedImm32(node->ecmaMode().value()));
+ m_jit.exceptionCheck();
+
+ unblessedBooleanResult(resultGPR, node);
+}
+
+void SpeculativeJIT::compileDeleteByVal(Node* node)
+{
+ if (node->child1().useKind() == CellUse) {
+ SpeculateCellOperand base(this, node->child1());
+
+ JSValueOperand key(this, node->child2(), ManualOperandSpeculation);
+ speculate(node, node->child2());
+
+ JSValueRegsTemporary result(this, Reuse, key);
+ GPRTemporary scratch(this);
+
+ JITCompiler::JumpList slowCases;
+
+ GPRReg baseGPR = base.gpr();
+ JSValueRegs keyRegs = key.jsValueRegs();
+ JSValueRegs resultRegs = result.regs();
+ GPRReg scratchGPR = scratch.gpr();
+ GPRReg resultGPR = resultRegs.payloadGPR();
+
+ if (needsTypeCheck(node->child2(), SpecCell))
+ slowCases.append(m_jit.branchIfNotCell(keyRegs));
+
+ CodeOrigin codeOrigin = node->origin.semantic;
+ CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(codeOrigin, m_stream->size());
+ RegisterSet usedRegisters = this->usedRegisters();
+
+ JITDelByValGenerator gen(
+ m_jit.codeBlock(), codeOrigin, callSite, usedRegisters,
+ JSValueRegs::payloadOnly(baseGPR), keyRegs, resultRegs, scratchGPR);
+
+ gen.generateFastPath(m_jit);
+ slowCases.append(gen.slowPathJump());
+
+#if USE(JSVALUE64)
+ std::unique_ptr<SlowPathGenerator> slowPath = slowPathCall(
+ slowCases, this, operationDeleteByValOptimize,
+ resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(codeOrigin)), gen.stubInfo(), JSValueRegs(baseGPR), keyRegs, TrustedImm32(node->ecmaMode().value()));
+#else
+ std::unique_ptr<SlowPathGenerator> slowPath = slowPathCall(
+ slowCases, this, operationDeleteByValOptimize,
+ resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(codeOrigin)), gen.stubInfo(), CCallHelpers::CellValue(baseGPR), keyRegs, TrustedImm32(node->ecmaMode().value()));
+#endif
+
+ m_jit.addDelByVal(gen, slowPath.get());
+ addSlowPathGenerator(WTFMove(slowPath));
+
+ unblessedBooleanResult(resultGPR, node);
+ return;
+ }
+
+ // FIXME: We should use IC even if child1 is UntypedUse. In that case, we should emit write-barrier after the fast path of IC.
+ // https://bugs.webkit.org/show_bug.cgi?id=209397
+ JSValueOperand base(this, node->child1());
+ JSValueOperand key(this, node->child2());
+
+ JSValueRegs baseRegs = base.jsValueRegs();
+ JSValueRegs keyRegs = key.jsValueRegs();
+
+ flushRegisters();
+ GPRFlushedCallResult result(this);
+ GPRReg resultGPR = result.gpr();
+ callOperation(operationDeleteByValGeneric, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), nullptr, baseRegs, keyRegs, TrustedImm32(node->ecmaMode().value()));
+ m_jit.exceptionCheck();
+
+ unblessedBooleanResult(resultGPR, node);
+}
+
void SpeculativeJIT::compileInById(Node* node)
{
SpeculateCellOperand base(this, node->child1());
Modified: trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT32_64.cpp (261609 => 261610)
--- trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT32_64.cpp 2020-05-13 10:47:34 UTC (rev 261609)
+++ trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT32_64.cpp 2020-05-13 12:16:36 UTC (rev 261610)
@@ -4293,45 +4293,6 @@
{
}
-void SpeculativeJIT::compileDeleteById(Node* node)
-{
- // FIXME: We should support inline caching on 32 bits.
- // See <https://bugs.webkit.org/show_bug.cgi?id=208207>.
- JSValueOperand value(this, node->child1());
- GPRFlushedCallResult result(this);
-
- JSValueRegs valueRegs = value.jsValueRegs();
- GPRReg resultGPR = result.gpr();
-
- value.use();
-
- flushRegisters();
- callOperation(operationDeleteByIdGeneric, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), nullptr, valueRegs, node->cacheableIdentifier().rawBits(), TrustedImm32(node->ecmaMode().value()));
- m_jit.exceptionCheck();
-
- unblessedBooleanResult(resultGPR, node, UseChildrenCalledExplicitly);
-}
-
-void SpeculativeJIT::compileDeleteByVal(Node* node)
-{
- JSValueOperand base(this, node->child1());
- JSValueOperand key(this, node->child2());
- GPRFlushedCallResult result(this);
-
- JSValueRegs baseRegs = base.jsValueRegs();
- JSValueRegs keyRegs = key.jsValueRegs();
- GPRReg resultGPR = result.gpr();
-
- base.use();
- key.use();
-
- flushRegisters();
- callOperation(operationDeleteByValGeneric, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), nullptr, baseRegs, keyRegs, TrustedImm32(node->ecmaMode().value()));
- m_jit.exceptionCheck();
-
- unblessedBooleanResult(resultGPR, node, UseChildrenCalledExplicitly);
-}
-
void SpeculativeJIT::compileArithRandom(Node* node)
{
JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->origin.semantic);
Modified: trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT64.cpp (261609 => 261610)
--- trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT64.cpp 2020-05-13 10:47:34 UTC (rev 261609)
+++ trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT64.cpp 2020-05-13 12:16:36 UTC (rev 261610)
@@ -5715,116 +5715,6 @@
strictInt32Result(scratch1GPR, m_currentNode);
}
-void SpeculativeJIT::compileDeleteById(Node* node)
-{
- if (node->child1().useKind() == CellUse) {
- SpeculateCellOperand base(this, node->child1());
- GPRTemporary result(this);
- GPRTemporary scratch(this);
-
- JITCompiler::JumpList slowCases;
-
- GPRReg baseGPR = base.gpr();
- GPRReg scratchGPR = scratch.gpr();
- GPRReg resultGPR = result.gpr();
-
- CodeOrigin codeOrigin = node->origin.semantic;
- CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(codeOrigin, m_stream->size());
- RegisterSet usedRegisters = this->usedRegisters();
-
- JITDelByIdGenerator gen(
- m_jit.codeBlock(), codeOrigin, callSite, usedRegisters, node->cacheableIdentifier(),
- JSValueRegs(baseGPR), resultGPR, scratchGPR);
-
- gen.generateFastPath(m_jit);
- slowCases.append(gen.slowPathJump());
-
- std::unique_ptr<SlowPathGenerator> slowPath = slowPathCall(
- slowCases, this, operationDeleteByIdOptimize,
- resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(codeOrigin)), gen.stubInfo(), JSValueRegs(baseGPR), node->cacheableIdentifier().rawBits(), TrustedImm32(node->ecmaMode().value()));
-
- m_jit.addDelById(gen, slowPath.get());
- addSlowPathGenerator(WTFMove(slowPath));
-
- unblessedBooleanResult(resultGPR, node);
- return;
- }
-
- // FIXME: We should use IC even if child1 is UntypedUse. In that case, we should emit write-barrier after the fast path of IC.
- // https://bugs.webkit.org/show_bug.cgi?id=209397
- ASSERT(node->child1().useKind() == UntypedUse);
- JSValueOperand base(this, node->child1());
-
- JSValueRegs baseRegs = base.jsValueRegs();
-
- flushRegisters();
- GPRFlushedCallResult result(this);
- GPRReg resultGPR = result.gpr();
- callOperation(operationDeleteByIdGeneric, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), nullptr, baseRegs, node->cacheableIdentifier().rawBits(), TrustedImm32(node->ecmaMode().value()));
- m_jit.exceptionCheck();
-
- unblessedBooleanResult(resultGPR, node);
-}
-
-void SpeculativeJIT::compileDeleteByVal(Node* node)
-{
- if (node->child1().useKind() == CellUse) {
- speculate(node, node->child2());
-
- SpeculateCellOperand base(this, node->child1());
- JSValueOperand key(this, node->child2(), ManualOperandSpeculation);
- GPRTemporary result(this);
- GPRTemporary scratch(this);
-
- JITCompiler::JumpList slowCases;
-
- GPRReg baseGPR = base.gpr();
- JSValueRegs keyRegs = key.jsValueRegs();
- GPRReg scratchGPR = scratch.gpr();
- GPRReg resultGPR = result.gpr();
-
- if (needsTypeCheck(node->child2(), SpecCell))
- slowCases.append(m_jit.branchIfNotCell(keyRegs));
-
- CodeOrigin codeOrigin = node->origin.semantic;
- CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(codeOrigin, m_stream->size());
- RegisterSet usedRegisters = this->usedRegisters();
-
- JITDelByValGenerator gen(
- m_jit.codeBlock(), codeOrigin, callSite, usedRegisters,
- JSValueRegs(baseGPR), keyRegs, resultGPR, scratchGPR);
-
- gen.generateFastPath(m_jit);
- slowCases.append(gen.slowPathJump());
-
- std::unique_ptr<SlowPathGenerator> slowPath = slowPathCall(
- slowCases, this, operationDeleteByValOptimize,
- resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(codeOrigin)), gen.stubInfo(), JSValueRegs(baseGPR), keyRegs, TrustedImm32(node->ecmaMode().value()));
-
- m_jit.addDelByVal(gen, slowPath.get());
- addSlowPathGenerator(WTFMove(slowPath));
-
- unblessedBooleanResult(resultGPR, node);
- return;
- }
-
- // FIXME: We should use IC even if child1 is UntypedUse. In that case, we should emit write-barrier after the fast path of IC.
- // https://bugs.webkit.org/show_bug.cgi?id=209397
- JSValueOperand base(this, node->child1());
- JSValueOperand key(this, node->child2());
-
- JSValueRegs baseRegs = base.jsValueRegs();
- JSValueRegs keyRegs = key.jsValueRegs();
-
- flushRegisters();
- GPRFlushedCallResult result(this);
- GPRReg resultGPR = result.gpr();
- callOperation(operationDeleteByValGeneric, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), nullptr, baseRegs, keyRegs, TrustedImm32(node->ecmaMode().value()));
- m_jit.exceptionCheck();
-
- unblessedBooleanResult(resultGPR, node);
-}
-
void SpeculativeJIT::compileDateGet(Node* node)
{
SpeculateCellOperand base(this, node->child1());
Modified: trunk/Source/_javascript_Core/ftl/FTLLowerDFGToB3.cpp (261609 => 261610)
--- trunk/Source/_javascript_Core/ftl/FTLLowerDFGToB3.cpp 2020-05-13 10:47:34 UTC (rev 261609)
+++ trunk/Source/_javascript_Core/ftl/FTLLowerDFGToB3.cpp 2020-05-13 12:16:36 UTC (rev 261610)
@@ -5444,12 +5444,12 @@
return Box<JITDelByIdGenerator>::create(
jit.codeBlock(), node->origin.semantic, callSiteIndex,
params.unavailableRegisters(), subscriptValue, base,
- returnGPR, params.gpScratch(0));
+ JSValueRegs(returnGPR), params.gpScratch(0));
} else {
return Box<JITDelByValGenerator>::create(
jit.codeBlock(), node->origin.semantic, callSiteIndex,
params.unavailableRegisters(), base,
- subscript, returnGPR, params.gpScratch(0));
+ subscript, JSValueRegs(returnGPR), params.gpScratch(0));
}
}();
Modified: trunk/Source/_javascript_Core/jit/JITInlineCacheGenerator.cpp (261609 => 261610)
--- trunk/Source/_javascript_Core/jit/JITInlineCacheGenerator.cpp 2020-05-13 10:47:34 UTC (rev 261609)
+++ trunk/Source/_javascript_Core/jit/JITInlineCacheGenerator.cpp 2020-05-13 12:16:36 UTC (rev 261610)
@@ -163,17 +163,18 @@
return operationPutByIdNonStrictOptimize;
}
-JITDelByValGenerator::JITDelByValGenerator(CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs property, GPRReg result, GPRReg scratch)
+JITDelByValGenerator::JITDelByValGenerator(CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs property, JSValueRegs result, GPRReg scratch)
: Base(codeBlock, codeOrigin, callSiteIndex, AccessType::DeleteByVal, usedRegisters)
{
m_stubInfo->hasConstantIdentifier = false;
- ASSERT(base.payloadGPR() != result);
+ ASSERT(base.payloadGPR() != result.payloadGPR());
m_stubInfo->baseGPR = base.payloadGPR();
m_stubInfo->regs.propertyGPR = property.payloadGPR();
- m_stubInfo->valueGPR = result;
+ m_stubInfo->valueGPR = result.payloadGPR();
#if USE(JSVALUE32_64)
+ ASSERT(base.tagGPR() != result.tagGPR());
m_stubInfo->baseTagGPR = base.tagGPR();
- m_stubInfo->valueTagGPR = InvalidGPRReg;
+ m_stubInfo->valueTagGPR = result.tagGPR();
m_stubInfo->v.propertyTagGPR = property.tagGPR();
#endif
m_stubInfo->usedRegisters.clear(scratch);
@@ -194,17 +195,18 @@
fastPath, slowPath, fastPath.locationOf<JITStubRoutinePtrTag>(m_start));
}
-JITDelByIdGenerator::JITDelByIdGenerator(CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, const RegisterSet& usedRegisters, CacheableIdentifier, JSValueRegs base, GPRReg result, GPRReg scratch)
+JITDelByIdGenerator::JITDelByIdGenerator(CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, const RegisterSet& usedRegisters, CacheableIdentifier, JSValueRegs base, JSValueRegs result, GPRReg scratch)
: Base(codeBlock, codeOrigin, callSiteIndex, AccessType::DeleteByID, usedRegisters)
{
m_stubInfo->hasConstantIdentifier = true;
- ASSERT(base.payloadGPR() != result);
+ ASSERT(base.payloadGPR() != result.payloadGPR());
m_stubInfo->baseGPR = base.payloadGPR();
m_stubInfo->regs.propertyGPR = InvalidGPRReg;
- m_stubInfo->valueGPR = result;
+ m_stubInfo->valueGPR = result.payloadGPR();
#if USE(JSVALUE32_64)
+ ASSERT(base.tagGPR() != result.tagGPR());
m_stubInfo->baseTagGPR = base.tagGPR();
- m_stubInfo->valueTagGPR = InvalidGPRReg;
+ m_stubInfo->valueTagGPR = result.tagGPR();
m_stubInfo->v.propertyTagGPR = InvalidGPRReg;
#endif
m_stubInfo->usedRegisters.clear(scratch);
Modified: trunk/Source/_javascript_Core/jit/JITInlineCacheGenerator.h (261609 => 261610)
--- trunk/Source/_javascript_Core/jit/JITInlineCacheGenerator.h 2020-05-13 10:47:34 UTC (rev 261609)
+++ trunk/Source/_javascript_Core/jit/JITInlineCacheGenerator.h 2020-05-13 12:16:36 UTC (rev 261610)
@@ -152,7 +152,7 @@
JITDelByValGenerator(
CodeBlock*, CodeOrigin, CallSiteIndex, const RegisterSet& usedRegisters,
- JSValueRegs base, JSValueRegs property, GPRReg result, GPRReg scratch);
+ JSValueRegs base, JSValueRegs property, JSValueRegs result, GPRReg scratch);
MacroAssembler::Jump slowPathJump() const
{
@@ -177,7 +177,7 @@
JITDelByIdGenerator(
CodeBlock*, CodeOrigin, CallSiteIndex, const RegisterSet& usedRegisters, CacheableIdentifier,
- JSValueRegs base, GPRReg result, GPRReg scratch);
+ JSValueRegs base, JSValueRegs result, GPRReg scratch);
MacroAssembler::Jump slowPathJump() const
{
Modified: trunk/Source/_javascript_Core/jit/JITPropertyAccess.cpp (261609 => 261610)
--- trunk/Source/_javascript_Core/jit/JITPropertyAccess.cpp 2020-05-13 10:47:34 UTC (rev 261609)
+++ trunk/Source/_javascript_Core/jit/JITPropertyAccess.cpp 2020-05-13 12:16:36 UTC (rev 261610)
@@ -400,7 +400,7 @@
JITDelByIdGenerator gen(
m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
CacheableIdentifier::createFromIdentifierOwnedByCodeBlock(m_codeBlock, *ident),
- JSValueRegs(regT1), regT0, regT2);
+ JSValueRegs(regT1), JSValueRegs(regT0), regT2);
gen.generateFastPath(*this);
addSlowCase(gen.slowPathJump());
m_delByIds.append(gen);
@@ -449,7 +449,7 @@
emitJumpSlowCaseIfNotJSCell(regT0, property);
JITDelByValGenerator gen(
m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
- JSValueRegs(regT1), JSValueRegs(regT0), regT0, regT2);
+ JSValueRegs(regT1), JSValueRegs(regT0), JSValueRegs(regT0), regT2);
gen.generateFastPath(*this);
addSlowCase(gen.slowPathJump());
m_delByVals.append(gen);
Modified: trunk/Source/_javascript_Core/jit/JITPropertyAccess32_64.cpp (261609 => 261610)
--- trunk/Source/_javascript_Core/jit/JITPropertyAccess32_64.cpp 2020-05-13 10:47:34 UTC (rev 261609)
+++ trunk/Source/_javascript_Core/jit/JITPropertyAccess32_64.cpp 2020-05-13 12:16:36 UTC (rev 261610)
@@ -123,10 +123,28 @@
VirtualRegister base = bytecode.m_base;
const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));
- emitLoad(base, regT1, regT0);
- callOperation(operationDeleteByIdGeneric, m_codeBlock->globalObject(), nullptr, JSValueRegs(regT1, regT0), CacheableIdentifier::createFromIdentifierOwnedByCodeBlock(m_codeBlock, *ident).rawBits(), TrustedImm32(bytecode.m_ecmaMode.value()));
- boxBoolean(regT0, JSValueRegs(regT1, regT0));
- emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
+ JSValueRegs baseRegs = JSValueRegs(regT3, regT2);
+ JSValueRegs resultRegs = JSValueRegs(regT1, regT0);
+
+ emitLoad(base, baseRegs.tagGPR(), baseRegs.payloadGPR());
+ emitJumpSlowCaseIfNotJSCell(base, baseRegs.tagGPR());
+ JITDelByIdGenerator gen(
+ m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
+ CacheableIdentifier::createFromIdentifierOwnedByCodeBlock(m_codeBlock, *ident),
+ baseRegs, resultRegs, regT4);
+ gen.generateFastPath(*this);
+ addSlowCase(gen.slowPathJump());
+ m_delByIds.append(gen);
+
+ boxBoolean(regT0, resultRegs);
+
+ emitPutVirtualRegister(dst, resultRegs);
+
+ // IC can write new Structure without write-barrier if a base is cell.
+ // We should emit write-barrier at the end of sequence since write-barrier clobbers registers.
+ // FIXME: Use UnconditionalWriteBarrier in Baseline effectively to reduce code size.
+ // https://bugs.webkit.org/show_bug.cgi?id=209395
+ emitWriteBarrier(base, ShouldFilterBase);
}
void JIT::emit_op_del_by_val(const Instruction* currentInstruction)
@@ -135,15 +153,85 @@
VirtualRegister dst = bytecode.m_dst;
VirtualRegister base = bytecode.m_base;
VirtualRegister property = bytecode.m_property;
- emitLoad2(base, regT1, regT0, property, regT3, regT2);
- callOperation(operationDeleteByValGeneric, m_codeBlock->globalObject(), nullptr, JSValueRegs(regT1, regT0), JSValueRegs(regT3, regT2), TrustedImm32(bytecode.m_ecmaMode.value()));
- boxBoolean(regT0, JSValueRegs(regT1, regT0));
- emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
+
+ JSValueRegs baseRegs = JSValueRegs(regT3, regT2);
+ JSValueRegs propertyRegs = JSValueRegs(regT1, regT0);
+ JSValueRegs resultRegs = JSValueRegs(regT1, regT0);
+
+ emitLoad2(base, baseRegs.tagGPR(), baseRegs.payloadGPR(), property, propertyRegs.tagGPR(), propertyRegs.payloadGPR());
+
+ emitJumpSlowCaseIfNotJSCell(base, baseRegs.tagGPR());
+ emitJumpSlowCaseIfNotJSCell(property, propertyRegs.tagGPR());
+
+ JITDelByValGenerator gen(
+ m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
+ baseRegs, propertyRegs, resultRegs, regT4);
+
+ gen.generateFastPath(*this);
+ addSlowCase(gen.slowPathJump());
+ m_delByVals.append(gen);
+
+ boxBoolean(regT0, resultRegs);
+ emitPutVirtualRegister(dst, resultRegs);
+
+ // We should emit write-barrier at the end of sequence since write-barrier clobbers registers.
+ // IC can write new Structure without write-barrier if a base is cell.
+ // FIXME: Use UnconditionalWriteBarrier in Baseline effectively to reduce code size.
+ // https://bugs.webkit.org/show_bug.cgi?id=209395
+ emitWriteBarrier(base, ShouldFilterBase);
}
-void JIT::emitSlow_op_del_by_val(const Instruction*, Vector<SlowCaseEntry>::iterator&) { }
-void JIT::emitSlow_op_del_by_id(const Instruction*, Vector<SlowCaseEntry>::iterator&) { }
+void JIT::emitSlow_op_del_by_val(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ linkAllSlowCases(iter);
+ auto bytecode = currentInstruction->as<OpDelByVal>();
+ VirtualRegister dst = bytecode.m_dst;
+ VirtualRegister base = bytecode.m_base;
+ VirtualRegister property = bytecode.m_property;
+
+ JITDelByValGenerator& gen = m_delByVals[m_delByValIndex++];
+
+ Label coldPathBegin = label();
+
+ JSValueRegs baseRegs = JSValueRegs(regT3, regT2);
+ JSValueRegs propertyRegs = JSValueRegs(regT1, regT0);
+ JSValueRegs resultRegs = JSValueRegs(regT1, regT0);
+
+ emitLoad2(base, baseRegs.tagGPR(), baseRegs.payloadGPR(), property, propertyRegs.tagGPR(), propertyRegs.payloadGPR());
+
+ Call call = callOperation(operationDeleteByValOptimize, TrustedImmPtr(m_codeBlock->globalObject()), gen.stubInfo(), JSValueRegs(baseRegs.tagGPR(), baseRegs.payloadGPR()), JSValueRegs(propertyRegs.tagGPR(), propertyRegs.payloadGPR()), TrustedImm32(bytecode.m_ecmaMode.value()));
+ gen.reportSlowPathCall(coldPathBegin, call);
+
+ boxBoolean(regT0, resultRegs);
+ emitPutVirtualRegister(dst, resultRegs);
+}
+
+void JIT::emitSlow_op_del_by_id(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ linkAllSlowCases(iter);
+
+ auto bytecode = currentInstruction->as<OpDelById>();
+ VirtualRegister dst = bytecode.m_dst;
+ VirtualRegister base = bytecode.m_base;
+ const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));
+
+ JSValueRegs baseRegs = JSValueRegs(regT1, regT0);
+ JSValueRegs resultRegs = JSValueRegs(regT1, regT0);
+
+ JITDelByIdGenerator& gen = m_delByIds[m_delByIdIndex++];
+
+ Label coldPathBegin = label();
+
+ emitLoad(base, baseRegs.tagGPR(), baseRegs.payloadGPR());
+
+ Call call = callOperation(operationDeleteByIdOptimize, TrustedImmPtr(m_codeBlock->globalObject()), gen.stubInfo(), baseRegs, CacheableIdentifier::createFromIdentifierOwnedByCodeBlock(m_codeBlock, *ident).rawBits(), TrustedImm32(bytecode.m_ecmaMode.value()));
+ gen.reportSlowPathCall(coldPathBegin, call);
+
+ boxBoolean(regT0, resultRegs);
+ emitPutVirtualRegister(dst, resultRegs);
+}
+
void JIT::emit_op_get_by_val(const Instruction* currentInstruction)
{
auto bytecode = currentInstruction->as<OpGetByVal>();