Modified: trunk/Source/_javascript_Core/ChangeLog (283953 => 283954)
--- trunk/Source/_javascript_Core/ChangeLog 2021-10-12 00:09:14 UTC (rev 283953)
+++ trunk/Source/_javascript_Core/ChangeLog 2021-10-12 00:33:18 UTC (rev 283954)
@@ -1,5 +1,31 @@
2021-10-11 Saam Barati <sbar...@apple.com>
+ Share more code that uses ScratchRegisterAllocator in the ICs
+ https://bugs.webkit.org/show_bug.cgi?id=231125
+ <rdar://problem/84066374>
+
+ Reviewed by Sam Weinig.
+
+ We had the same code to construct a ScratchRegisterAllocator copy-pasted
+ all over the IC code. This patch refactors that to use a shared helper.
+
+ Also, the Delete access case was using a ScratchRegisterAllocator for no
+ reason (it never allocated a scratch register), so I deleted that code.
+
+ * bytecode/AccessCase.cpp:
+ (JSC::AccessCase::generateWithGuard):
+ (JSC::AccessCase::generateImpl):
+ * bytecode/GetterSetterAccessCase.cpp:
+ (JSC::GetterSetterAccessCase::emitDOMJITGetter):
+ * bytecode/PolymorphicAccess.cpp:
+ (JSC::AccessGenerationState::makeDefaultScratchAllocator):
+ (JSC::PolymorphicAccess::regenerate):
+ * bytecode/PolymorphicAccess.h:
+ * jit/IntrinsicEmitter.cpp:
+ (JSC::IntrinsicGetterAccessCase::emitIntrinsicGetter):
+
+2021-10-11 Saam Barati <sbar...@apple.com>
+
Don't branch around register allocation in DFG enumerator get by val and pass in the right LValue type to strictInt52ToJSValue
https://bugs.webkit.org/show_bug.cgi?id=231465
<rdar://83876470>
Modified: trunk/Source/_javascript_Core/bytecode/AccessCase.cpp (283953 => 283954)
--- trunk/Source/_javascript_Core/bytecode/AccessCase.cpp 2021-10-12 00:09:14 UTC (rev 283953)
+++ trunk/Source/_javascript_Core/bytecode/AccessCase.cpp 2021-10-12 00:33:18 UTC (rev 283954)
@@ -1098,14 +1098,7 @@
jit.load8(CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()), scratchGPR);
fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(ScopedArgumentsType)));
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(stubInfo.baseRegs());
- allocator.lock(valueRegs);
- allocator.lock(stubInfo.propertyRegs());
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- ASSERT(stubInfo.m_arrayProfileGPR == InvalidGPRReg);
- allocator.lock(scratchGPR);
+ auto allocator = state.makeDefaultScratchAllocator(scratchGPR);
GPRReg scratch2GPR = allocator.allocateScratchGPR();
GPRReg scratch3GPR = allocator.allocateScratchGPR();
@@ -1198,14 +1191,7 @@
jit.load32(CCallHelpers::Address(baseGPR, JSArrayBufferView::offsetOfLength()), scratchGPR);
state.failAndRepatch.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(stubInfo.baseRegs());
- allocator.lock(valueRegs);
- allocator.lock(stubInfo.propertyRegs());
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- ASSERT(stubInfo.m_arrayProfileGPR == InvalidGPRReg);
- allocator.lock(scratchGPR);
+ auto allocator = state.makeDefaultScratchAllocator(scratchGPR);
GPRReg scratch2GPR = allocator.allocateScratchGPR();
ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
@@ -1284,14 +1270,7 @@
fallThrough.append(jit.branchIfNotString(baseGPR));
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(stubInfo.baseRegs());
- allocator.lock(valueRegs);
- allocator.lock(stubInfo.propertyRegs());
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- ASSERT(stubInfo.m_arrayProfileGPR == InvalidGPRReg);
- allocator.lock(scratchGPR);
+ auto allocator = state.makeDefaultScratchAllocator(scratchGPR);
GPRReg scratch2GPR = allocator.allocateScratchGPR();
CCallHelpers::JumpList failAndIgnore;
@@ -1345,14 +1324,8 @@
jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
jit.and32(CCallHelpers::TrustedImm32(IndexingShapeMask), scratchGPR);
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(stubInfo.baseRegs());
- allocator.lock(valueRegs);
- allocator.lock(stubInfo.propertyRegs());
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- ASSERT(stubInfo.m_arrayProfileGPR == InvalidGPRReg);
- allocator.lock(scratchGPR);
+ auto allocator = state.makeDefaultScratchAllocator(scratchGPR);
+
GPRReg scratch2GPR = allocator.allocateScratchGPR();
#if USE(JSVALUE32_64)
GPRReg scratch3GPR = allocator.allocateScratchGPR();
@@ -1453,15 +1426,7 @@
CCallHelpers::JumpList isOutOfBounds;
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(stubInfo.baseRegs());
- allocator.lock(valueRegs);
- allocator.lock(stubInfo.propertyRegs());
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- if (stubInfo.m_arrayProfileGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_arrayProfileGPR);
- allocator.lock(scratchGPR);
+ auto allocator = state.makeDefaultScratchAllocator(scratchGPR);
GPRReg scratch2GPR = allocator.allocateScratchGPR();
ScratchRegisterAllocator::PreservedState preservedState;
@@ -1634,15 +1599,7 @@
// OutOfBounds bit of ArrayProfile will be set in the operation function.
state.failAndRepatch.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(stubInfo.baseRegs());
- allocator.lock(valueRegs);
- allocator.lock(stubInfo.propertyRegs());
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- if (stubInfo.m_arrayProfileGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_arrayProfileGPR);
- allocator.lock(scratchGPR);
+ auto allocator = state.makeDefaultScratchAllocator(scratchGPR);
GPRReg scratch2GPR = allocator.allocateScratchGPR();
ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
@@ -1721,14 +1678,7 @@
GPRReg valueGPR = valueRegs.payloadGPR();
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(stubInfo.baseRegs());
- allocator.lock(valueRegs);
- allocator.lock(stubInfo.propertyRegs());
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- ASSERT(stubInfo.m_arrayProfileGPR == InvalidGPRReg);
- allocator.lock(scratchGPR);
+ auto allocator = state.makeDefaultScratchAllocator(scratchGPR);
GPRReg scratch2GPR = allocator.allocateScratchGPR();
@@ -2288,16 +2238,7 @@
bool reallocating = allocating && structure()->outOfLineCapacity();
bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(stubInfo.baseRegs());
- allocator.lock(valueRegs);
- if (stubInfo.propertyRegs())
- allocator.lock(stubInfo.propertyRegs());
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- if (stubInfo.m_arrayProfileGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_arrayProfileGPR);
- allocator.lock(scratchGPR);
+ auto allocator = state.makeDefaultScratchAllocator(scratchGPR);
GPRReg scratchGPR2 = InvalidGPRReg;
GPRReg scratchGPR3 = InvalidGPRReg;
@@ -2449,14 +2390,6 @@
}
case Delete: {
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(stubInfo.baseRegs());
- allocator.lock(valueRegs);
- allocator.lock(baseGPR);
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- ASSERT(stubInfo.m_arrayProfileGPR == InvalidGPRReg);
- allocator.lock(scratchGPR);
ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
ASSERT(newStructure()->transitionKind() == TransitionKind::PropertyDeletion);
ASSERT(baseGPR != scratchGPR);
@@ -2463,9 +2396,6 @@
ASSERT(!valueRegs.uses(baseGPR));
ASSERT(!valueRegs.uses(scratchGPR));
- ScratchRegisterAllocator::PreservedState preservedState =
- allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
-
jit.moveValue(JSValue(), valueRegs);
if (isInlineOffset(m_offset)) {
@@ -2489,7 +2419,6 @@
jit.move(MacroAssembler::TrustedImm32(true), valueRegs.payloadGPR());
- allocator.restoreReusedRegistersByPopping(jit, preservedState);
state.succeed();
return;
}
Modified: trunk/Source/_javascript_Core/bytecode/GetterSetterAccessCase.cpp (283953 => 283954)
--- trunk/Source/_javascript_Core/bytecode/GetterSetterAccessCase.cpp 2021-10-12 00:09:14 UTC (rev 283953)
+++ trunk/Source/_javascript_Core/bytecode/GetterSetterAccessCase.cpp 2021-10-12 00:33:18 UTC (rev 283954)
@@ -136,13 +136,7 @@
Vector<FPRReg> fpScratch;
Vector<SnippetParams::Value> regs;
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(stubInfo.baseRegs());
- allocator.lock(valueRegs);
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- ASSERT(stubInfo.m_arrayProfileGPR == InvalidGPRReg);
- allocator.lock(scratchGPR);
+ auto allocator = state.makeDefaultScratchAllocator(scratchGPR);
GPRReg paramBaseGPR = InvalidGPRReg;
GPRReg paramGlobalObjectGPR = InvalidGPRReg;
Modified: trunk/Source/_javascript_Core/bytecode/PolymorphicAccess.cpp (283953 => 283954)
--- trunk/Source/_javascript_Core/bytecode/PolymorphicAccess.cpp 2021-10-12 00:09:14 UTC (rev 283953)
+++ trunk/Source/_javascript_Core/bytecode/PolymorphicAccess.cpp 2021-10-12 00:33:18 UTC (rev 283954)
@@ -253,6 +253,22 @@
}
}
+ScratchRegisterAllocator AccessGenerationState::makeDefaultScratchAllocator(GPRReg extraToLock)
+{
+ ScratchRegisterAllocator allocator(stubInfo->usedRegisters);
+ allocator.lock(stubInfo->baseRegs());
+ allocator.lock(valueRegs);
+ allocator.lock(u.thisGPR);
+#if USE(JSVALUE32_64)
+ allocator.lock(stubInfo->v.thisTagGPR);
+#endif
+ allocator.lock(stubInfo->m_stubInfoGPR);
+ allocator.lock(stubInfo->m_arrayProfileGPR);
+ allocator.lock(extraToLock);
+
+ return allocator;
+}
+
PolymorphicAccess::PolymorphicAccess() { }
PolymorphicAccess::~PolymorphicAccess() { }
@@ -483,23 +499,8 @@
}
m_list.resize(dstIndex);
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
+ auto allocator = state.makeDefaultScratchAllocator();
state.allocator = &allocator;
- allocator.lock(state.baseGPR);
- if (state.u.thisGPR != InvalidGPRReg)
- allocator.lock(state.u.thisGPR);
- if (state.valueRegs)
- allocator.lock(state.valueRegs);
-#if USE(JSVALUE32_64)
- allocator.lock(stubInfo.baseTagGPR);
- if (stubInfo.v.thisTagGPR != InvalidGPRReg)
- allocator.lock(stubInfo.v.thisTagGPR);
-#endif
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- if (stubInfo.m_arrayProfileGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_arrayProfileGPR);
-
state.scratchGPR = allocator.allocateScratchGPR();
for (auto& accessCase : cases) {
Modified: trunk/Source/_javascript_Core/bytecode/PolymorphicAccess.h (283953 => 283954)
--- trunk/Source/_javascript_Core/bytecode/PolymorphicAccess.h 2021-10-12 00:09:14 UTC (rev 283953)
+++ trunk/Source/_javascript_Core/bytecode/PolymorphicAccess.h 2021-10-12 00:33:18 UTC (rev 283954)
@@ -45,7 +45,6 @@
class PolymorphicAccess;
class StructureStubInfo;
class WatchpointsOnStructureStubInfo;
-class ScratchRegisterAllocator;
DECLARE_ALLOCATOR_WITH_HEAP_IDENTIFIER(PolymorphicAccess);
@@ -277,6 +276,8 @@
m_spillStateForJSGetterSetter = spillState;
}
SpillState spillStateForJSGetterSetter() const { return m_spillStateForJSGetterSetter; }
+
+ ScratchRegisterAllocator makeDefaultScratchAllocator(GPRReg extraToLock = InvalidGPRReg);
private:
const RegisterSet& liveRegistersToPreserveAtExceptionHandlingCallSite();
Modified: trunk/Source/_javascript_Core/jit/IntrinsicEmitter.cpp (283953 => 283954)
--- trunk/Source/_javascript_Core/jit/IntrinsicEmitter.cpp 2021-10-12 00:09:14 UTC (rev 283953)
+++ trunk/Source/_javascript_Core/jit/IntrinsicEmitter.cpp 2021-10-12 00:33:18 UTC (rev 283954)
@@ -139,20 +139,7 @@
case UnderscoreProtoIntrinsic: {
StructureStubInfo& stubInfo = *state.stubInfo;
if (stubInfo.thisValueIsInThisGPR()) {
- ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
- allocator.lock(state.scratchGPR);
- allocator.lock(state.baseGPR);
- allocator.lock(state.u.thisGPR);
- allocator.lock(valueRegs);
-#if USE(JSVALUE32_64)
- allocator.lock(stubInfo.baseTagGPR);
- allocator.lock(stubInfo.v.thisTagGPR);
-#endif
- if (stubInfo.m_stubInfoGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_stubInfoGPR);
- if (stubInfo.m_arrayProfileGPR != InvalidGPRReg)
- allocator.lock(stubInfo.m_arrayProfileGPR);
-
+ auto allocator = state.makeDefaultScratchAllocator(state.scratchGPR);
GPRReg scratch2GPR = allocator.allocateScratchGPR();
auto preservedState = allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);