Diff
Modified: trunk/Source/JavaScriptCore/ChangeLog (190560 => 190561)
--- trunk/Source/JavaScriptCore/ChangeLog 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/ChangeLog 2015-10-05 17:05:24 UTC (rev 190561)
@@ -1,5 +1,77 @@
2015-10-04 Filip Pizlo <[email protected]>
+ Inline cache repatching should be throttled if it happens a lot
+ https://bugs.webkit.org/show_bug.cgi?id=149796
+ rdar://problem/22674436
+
+ Reviewed by Saam Barati.
+
+ We noticed a slight PLT regression from http://trac.webkit.org/changeset/189586. It's because
+ some pages do things that our inline caches mishandle, in the sense that some ICs end up
+ repatching themselves very frequently. The cost of repatching outweighs the speed-up on those
+ pages. There are probably super smart things we could do to tune the IC heuristics to make the
+ ICs do the right thing on those pages. But more fundamentally, we should ensure that our ICs
+ back off from continuous repatching if they repatch a lot. That's what this change does.
+
+ With this change, StructureStubInfo counts the number of repatchings. If that exceeds a
+ threshold, we put the IC into a cool-down mode, where some number of future repatch events do
+ nothing but decrement the cool-down counter. The duration of cool-down increases exponentially
+ every time we have to do it.
+
+ This change also outlines a lot of code. The fact that StructureStubInfo had a lot of inline
+ methods was starting to get on my nerves. Now it only has inline methods for things that need
+ to be inlined. Also, I changed StructureStubInfo to be a class rather than a struct. Maybe
+ with enough such incremental changes, eventually StructureStubInfo will actually behave like a
+ proper class.
+
+ This has no effect on JSC benchmarks. It progresses one of the pages that was hit by the
+ regression by 15%. It's hard to see if this totally fixes the entire PLT regression since the
+ geomean regression was very close to noise.
+
+ * bytecode/CodeBlock.cpp:
+ (JSC::CodeBlock::printGetByIdCacheStatus):
+ (JSC::CodeBlock::printPutByIdCacheStatus):
+ (JSC::CodeBlock::CodeBlock):
+ (JSC::CodeBlock::checkIfOptimizationThresholdReached):
+ * bytecode/CodeBlock.h:
+ * bytecode/GetByIdStatus.cpp:
+ (JSC::GetByIdStatus::computeForStubInfoWithoutExitSiteFeedback):
+ (JSC::GetByIdStatus::computeFor):
+ * bytecode/PolymorphicAccess.cpp:
+ (JSC::PolymorphicAccess::regenerate):
+ * bytecode/PolymorphicAccess.h:
+ * bytecode/PutByIdStatus.cpp:
+ (JSC::PutByIdStatus::computeForStubInfo):
+ * bytecode/StructureStubClearingWatchpoint.h:
+ * bytecode/StructureStubInfo.cpp:
+ (JSC::StructureStubInfo::StructureStubInfo):
+ (JSC::StructureStubInfo::~StructureStubInfo):
+ (JSC::StructureStubInfo::initGetByIdSelf):
+ (JSC::StructureStubInfo::initPutByIdReplace):
+ (JSC::StructureStubInfo::initStub):
+ (JSC::StructureStubInfo::deref):
+ (JSC::StructureStubInfo::addAccessCase):
+ * bytecode/StructureStubInfo.h:
+ (JSC::StructureStubInfo::considerCaching):
+ (JSC::StructureStubInfo::willRepatch):
+ (JSC::StructureStubInfo::willCoolDown):
+ (JSC::getStructureStubInfoCodeOrigin):
+ (JSC::StructureStubInfo::StructureStubInfo): Deleted.
+ (JSC::StructureStubInfo::initGetByIdSelf): Deleted.
+ (JSC::StructureStubInfo::initPutByIdReplace): Deleted.
+ (JSC::StructureStubInfo::initStub): Deleted.
+ (JSC::StructureStubInfo::seenOnce): Deleted.
+ (JSC::StructureStubInfo::setSeen): Deleted.
+ * jit/JIT.h:
+ * jit/JITOperations.cpp:
+ * jit/Repatch.cpp:
+ (JSC::tryCacheGetByID):
+ (JSC::tryCachePutByID):
+ (JSC::tryRepatchIn):
+ * runtime/Options.h:
+
+2015-10-04 Filip Pizlo <[email protected]>
+
CodeBlock.h shouldn't be included from everywhere
https://bugs.webkit.org/show_bug.cgi?id=149785
Modified: trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp (190560 => 190561)
--- trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp 2015-10-05 17:05:24 UTC (rev 190561)
@@ -341,39 +341,37 @@
if (stubInfo.resetByGC)
out.print(" (Reset By GC)");
- if (stubInfo.seen) {
- out.printf(" jit(");
+ out.printf(" jit(");
- Structure* baseStructure = nullptr;
- PolymorphicAccess* stub = nullptr;
+ Structure* baseStructure = nullptr;
+ PolymorphicAccess* stub = nullptr;
- switch (stubInfo.cacheType) {
- case CacheType::GetByIdSelf:
- out.printf("self");
- baseStructure = stubInfo.u.byIdSelf.baseObjectStructure.get();
- break;
- case CacheType::Stub:
- out.printf("stub");
- stub = stubInfo.u.stub;
- break;
- case CacheType::Unset:
- out.printf("unset");
- break;
- default:
- RELEASE_ASSERT_NOT_REACHED();
- break;
- }
+ switch (stubInfo.cacheType) {
+ case CacheType::GetByIdSelf:
+ out.printf("self");
+ baseStructure = stubInfo.u.byIdSelf.baseObjectStructure.get();
+ break;
+ case CacheType::Stub:
+ out.printf("stub");
+ stub = stubInfo.u.stub;
+ break;
+ case CacheType::Unset:
+ out.printf("unset");
+ break;
+ default:
+ RELEASE_ASSERT_NOT_REACHED();
+ break;
+ }
- if (baseStructure) {
- out.printf(", ");
- dumpStructure(out, "struct", baseStructure, ident);
- }
+ if (baseStructure) {
+ out.printf(", ");
+ dumpStructure(out, "struct", baseStructure, ident);
+ }
- if (stub)
- out.print(", ", *stub);
+ if (stub)
+ out.print(", ", *stub);
- out.printf(")");
- }
+ out.printf(")");
}
#else
UNUSED_PARAM(map);
@@ -413,27 +411,25 @@
if (stubInfo.resetByGC)
out.print(" (Reset By GC)");
- if (stubInfo.seen) {
- out.printf(" jit(");
-
- switch (stubInfo.cacheType) {
- case CacheType::PutByIdReplace:
- out.print("replace, ");
- dumpStructure(out, "struct", stubInfo.u.byIdSelf.baseObjectStructure.get(), ident);
- break;
- case CacheType::Stub: {
- out.print("stub, ", *stubInfo.u.stub);
- break;
- }
- case CacheType::Unset:
- out.printf("unset");
- break;
- default:
- RELEASE_ASSERT_NOT_REACHED();
- break;
- }
- out.printf(")");
+ out.printf(" jit(");
+
+ switch (stubInfo.cacheType) {
+ case CacheType::PutByIdReplace:
+ out.print("replace, ");
+ dumpStructure(out, "struct", stubInfo.u.byIdSelf.baseObjectStructure.get(), ident);
+ break;
+ case CacheType::Stub: {
+ out.print("stub, ", *stubInfo.u.stub);
+ break;
}
+ case CacheType::Unset:
+ out.printf("unset");
+ break;
+ default:
+ RELEASE_ASSERT_NOT_REACHED();
+ break;
+ }
+ out.printf(")");
}
#else
UNUSED_PARAM(map);
Modified: trunk/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp (190560 => 190561)
--- trunk/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp 2015-10-05 17:05:24 UTC (rev 190561)
@@ -137,12 +137,9 @@
const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, UniquedStringImpl* uid,
CallLinkStatus::ExitSiteData callExitSiteData)
{
- if (!stubInfo)
+ if (!stubInfo || !stubInfo->everConsidered)
return GetByIdStatus(NoInformation);
-
- if (!stubInfo->seen)
- return GetByIdStatus(NoInformation);
-
+
PolymorphicAccess* list = 0;
State slowPathState = TakesSlowPath;
if (stubInfo->cacheType == CacheType::Stub) {
@@ -269,7 +266,7 @@
result = computeForStubInfoWithoutExitSiteFeedback(
locker, dfgBlock, dfgMap.get(codeOrigin), uid, exitSiteData);
}
-
+
if (result.takesSlowPath())
return result;
Modified: trunk/Source/JavaScriptCore/bytecode/PolymorphicAccess.cpp (190560 => 190561)
--- trunk/Source/JavaScriptCore/bytecode/PolymorphicAccess.cpp 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/bytecode/PolymorphicAccess.cpp 2015-10-05 17:05:24 UTC (rev 190561)
@@ -1127,14 +1127,18 @@
state.failAndRepatch.append(binarySwitch.fallThrough());
}
- state.failAndIgnore.link(&jit);
+ if (!state.failAndIgnore.empty()) {
+ state.failAndIgnore.link(&jit);
+
+ // Make sure that the inline cache optimization code knows that we are taking slow path because
+ // of something that isn't patchable. The slow path will decrement "countdown" and will only
+ // patch things if the countdown reaches zero. We increment the slow path count here to ensure
+ // that the slow path does not try to patch.
+ jit.load8(&stubInfo.countdown, state.scratchGPR);
+ jit.add32(CCallHelpers::TrustedImm32(1), state.scratchGPR);
+ jit.store8(state.scratchGPR, &stubInfo.countdown);
+ }
- // Make sure that the inline cache optimization code knows that we are taking slow path because
- // of something that isn't patchable. "seen" being false means that we bypass patching. This is
- // pretty gross but it means that we don't need to have two slow path entrypoints - one for
- // patching and one for normal slow stuff.
- jit.store8(CCallHelpers::TrustedImm32(false), &stubInfo.seen);
-
CCallHelpers::JumpList failure;
if (allocator.didReuseRegisters()) {
state.failAndRepatch.link(&jit);
@@ -1161,6 +1165,9 @@
for (auto callback : state.callbacks)
callback(linkBuffer);
+
+ if (verbose)
+ dataLog(*codeBlock, " ", stubInfo.codeOrigin, ": Generating polymorphic access stub for ", listDump(cases), "\n");
MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
codeBlock, linkBuffer,
Modified: trunk/Source/JavaScriptCore/bytecode/PolymorphicAccess.h (190560 => 190561)
--- trunk/Source/JavaScriptCore/bytecode/PolymorphicAccess.h 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/bytecode/PolymorphicAccess.h 2015-10-05 17:05:24 UTC (rev 190561)
@@ -39,8 +39,8 @@
class CodeBlock;
class PolymorphicAccess;
+class StructureStubInfo;
class WatchpointsOnStructureStubInfo;
-struct StructureStubInfo;
struct AccessGenerationState;
Modified: trunk/Source/JavaScriptCore/bytecode/PutByIdStatus.cpp (190560 => 190561)
--- trunk/Source/JavaScriptCore/bytecode/PutByIdStatus.cpp 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/bytecode/PutByIdStatus.cpp 2015-10-05 17:05:24 UTC (rev 190561)
@@ -144,18 +144,15 @@
const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
UniquedStringImpl* uid, CallLinkStatus::ExitSiteData callExitSiteData)
{
- if (!stubInfo)
+ if (!stubInfo || !stubInfo->everConsidered)
return PutByIdStatus();
if (stubInfo->tookSlowPath)
return PutByIdStatus(TakesSlowPath);
- if (!stubInfo->seen)
- return PutByIdStatus();
-
switch (stubInfo->cacheType) {
case CacheType::Unset:
- // If the JIT saw it but didn't optimize it, then assume that this takes slow path.
+ // This means that we attempted to cache but failed for some reason.
return PutByIdStatus(TakesSlowPath);
case CacheType::PutByIdReplace: {
Modified: trunk/Source/JavaScriptCore/bytecode/StructureStubClearingWatchpoint.h (190560 => 190561)
--- trunk/Source/JavaScriptCore/bytecode/StructureStubClearingWatchpoint.h 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/bytecode/StructureStubClearingWatchpoint.h 2015-10-05 17:05:24 UTC (rev 190561)
@@ -37,8 +37,8 @@
namespace JSC {
class CodeBlock;
+class StructureStubInfo;
class WatchpointsOnStructureStubInfo;
-struct StructureStubInfo;
class StructureStubClearingWatchpoint : public Watchpoint {
WTF_MAKE_NONCOPYABLE(StructureStubClearingWatchpoint);
Modified: trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.cpp (190560 => 190561)
--- trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.cpp 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.cpp 2015-10-05 17:05:24 UTC (rev 190561)
@@ -33,6 +33,47 @@
namespace JSC {
#if ENABLE(JIT)
+StructureStubInfo::StructureStubInfo(AccessType accessType)
+ : callSiteIndex(UINT_MAX)
+ , accessType(accessType)
+ , cacheType(CacheType::Unset)
+ , countdown(1) // For a totally clear stub, we'll patch it after the first execution.
+ , repatchCount(0)
+ , numberOfCoolDowns(0)
+ , resetByGC(false)
+ , tookSlowPath(false)
+ , everConsidered(false)
+{
+}
+
+StructureStubInfo::~StructureStubInfo()
+{
+}
+
+void StructureStubInfo::initGetByIdSelf(CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset)
+{
+ cacheType = CacheType::GetByIdSelf;
+
+ u.byIdSelf.baseObjectStructure.set(
+ *codeBlock->vm(), codeBlock->ownerExecutable(), baseObjectStructure);
+ u.byIdSelf.offset = offset;
+}
+
+void StructureStubInfo::initPutByIdReplace(CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset)
+{
+ cacheType = CacheType::PutByIdReplace;
+
+ u.byIdSelf.baseObjectStructure.set(
+ *codeBlock->vm(), codeBlock->ownerExecutable(), baseObjectStructure);
+ u.byIdSelf.offset = offset;
+}
+
+void StructureStubInfo::initStub(CodeBlock*, std::unique_ptr<PolymorphicAccess> stub)
+{
+ cacheType = CacheType::Stub;
+ u.stub = stub.release();
+}
+
void StructureStubInfo::deref()
{
switch (cacheType) {
@@ -49,8 +90,10 @@
}
MacroAssemblerCodePtr StructureStubInfo::addAccessCase(
- VM& vm, CodeBlock* codeBlock, const Identifier& ident, std::unique_ptr<AccessCase> accessCase)
+ CodeBlock* codeBlock, const Identifier& ident, std::unique_ptr<AccessCase> accessCase)
{
+ VM& vm = *codeBlock->vm();
+
if (!accessCase)
return MacroAssemblerCodePtr();
@@ -74,8 +117,7 @@
if (!result)
return MacroAssemblerCodePtr();
- cacheType = CacheType::Stub;
- u.stub = access.release();
+ initStub(codeBlock, WTF::move(access));
return result;
}
Modified: trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.h (190560 => 190561)
--- trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.h 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.h 2015-10-05 17:05:24 UTC (rev 190561)
@@ -32,6 +32,7 @@
#include "MacroAssembler.h"
#include "ObjectPropertyConditionSet.h"
#include "Opcode.h"
+#include "Options.h"
#include "PolymorphicAccess.h"
#include "RegisterSet.h"
#include "SpillRegistersMode.h"
@@ -57,41 +58,19 @@
Stub
};
-struct StructureStubInfo {
- StructureStubInfo(AccessType accessType)
- : accessType(accessType)
- , cacheType(CacheType::Unset)
- , seen(false)
- , resetByGC(false)
- , tookSlowPath(false)
- , callSiteIndex(UINT_MAX)
- {
- }
+class StructureStubInfo {
+ WTF_MAKE_NONCOPYABLE(StructureStubInfo);
+ WTF_MAKE_FAST_ALLOCATED;
+public:
+ StructureStubInfo(AccessType);
+ ~StructureStubInfo();
- void initGetByIdSelf(VM& vm, JSCell* owner, Structure* baseObjectStructure, PropertyOffset offset)
- {
- cacheType = CacheType::GetByIdSelf;
+ void initGetByIdSelf(CodeBlock*, Structure* baseObjectStructure, PropertyOffset);
+ void initPutByIdReplace(CodeBlock*, Structure* baseObjectStructure, PropertyOffset);
+ void initStub(CodeBlock*, std::unique_ptr<PolymorphicAccess>);
- u.byIdSelf.baseObjectStructure.set(vm, owner, baseObjectStructure);
- u.byIdSelf.offset = offset;
- }
-
- void initPutByIdReplace(VM& vm, JSCell* owner, Structure* baseObjectStructure, PropertyOffset offset)
- {
- cacheType = CacheType::PutByIdReplace;
-
- u.byIdSelf.baseObjectStructure.set(vm, owner, baseObjectStructure);
- u.byIdSelf.offset = offset;
- }
-
- void initStub(std::unique_ptr<PolymorphicAccess> stub)
- {
- cacheType = CacheType::Stub;
- u.stub = stub.release();
- }
-
MacroAssemblerCodePtr addAccessCase(
- VM&, CodeBlock*, const Identifier&, std::unique_ptr<AccessCase>);
+ CodeBlock*, const Identifier&, std::unique_ptr<AccessCase>);
void reset(CodeBlock*);
@@ -101,25 +80,56 @@
// either entirely or just enough to ensure that those dead pointers don't get used anymore.
void visitWeakReferences(CodeBlock*);
- bool seenOnce()
+ ALWAYS_INLINE bool considerCaching()
{
- return seen;
+ everConsidered = true;
+ if (!countdown) {
+ // Check if we have been doing repatching too frequently. If so, then we should cool off
+ // for a while.
+ willRepatch();
+ if (repatchCount > Options::repatchCountForCoolDown()) {
+ // We've been repatching too much, so don't do it now.
+ repatchCount = 0;
+ // The amount of time we require for cool-down depends on the number of times we've
+ had to cool down in the past. The relationship is exponential. The max value we
+ allow here is 2^8 - 2 (i.e. uint8_t max minus one), since the slow paths may increment the count to indicate
+ that they'd like to temporarily skip patching just this once.
+ countdown = WTF::leftShiftWithSaturation(
+ static_cast<uint8_t>(Options::initialCoolDownCount()),
+ numberOfCoolDowns,
+ static_cast<uint8_t>(std::numeric_limits<uint8_t>::max() - 1));
+ willCoolDown();
+ return false;
+ }
+ return true;
+ }
+ countdown--;
+ return false;
}
- void setSeen()
+ ALWAYS_INLINE void willRepatch()
{
- seen = true;
+ WTF::incrementWithSaturation(repatchCount);
}
-
- AccessType accessType;
- CacheType cacheType;
- bool seen;
- bool resetByGC : 1;
- bool tookSlowPath : 1;
+ ALWAYS_INLINE void willCoolDown()
+ {
+ WTF::incrementWithSaturation(numberOfCoolDowns);
+ }
+
+ CodeLocationCall callReturnLocation;
+
CodeOrigin codeOrigin;
CallSiteIndex callSiteIndex;
+ union {
+ struct {
+ WriteBarrierBase<Structure> baseObjectStructure;
+ PropertyOffset offset;
+ } byIdSelf;
+ PolymorphicAccess* stub;
+ } u;
+
struct {
unsigned spillMode : 8;
int8_t baseGPR;
@@ -141,15 +151,14 @@
#endif
} patch;
- union {
- struct {
- WriteBarrierBase<Structure> baseObjectStructure;
- PropertyOffset offset;
- } byIdSelf;
- PolymorphicAccess* stub;
- } u;
-
- CodeLocationCall callReturnLocation;
+ AccessType accessType;
+ CacheType cacheType;
+ uint8_t countdown; // We repatch only when this is zero. If not zero, we decrement.
+ uint8_t repatchCount;
+ uint8_t numberOfCoolDowns;
+ bool resetByGC : 1;
+ bool tookSlowPath : 1;
+ bool everConsidered : 1;
};
inline CodeOrigin getStructureStubInfoCodeOrigin(StructureStubInfo& structureStubInfo)
Modified: trunk/Source/JavaScriptCore/jit/JIT.h (190560 => 190561)
--- trunk/Source/JavaScriptCore/jit/JIT.h 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/jit/JIT.h 2015-10-05 17:05:24 UTC (rev 190561)
@@ -63,12 +63,12 @@
class MarkedAllocator;
class Register;
class StructureChain;
+ class StructureStubInfo;
struct Instruction;
struct OperandTypes;
struct SimpleJumpTable;
struct StringJumpTable;
- struct StructureStubInfo;
struct CallRecord {
MacroAssembler::Call from;
Modified: trunk/Source/JavaScriptCore/jit/JITOperations.cpp (190560 => 190561)
--- trunk/Source/JavaScriptCore/jit/JITOperations.cpp 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/jit/JITOperations.cpp 2015-10-05 17:05:24 UTC (rev 190561)
@@ -184,10 +184,8 @@
PropertySlot slot(baseValue);
bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
- if (stubInfo->seen)
+ if (stubInfo->considerCaching())
repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
- else
- stubInfo->seen = true;
return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
}
@@ -210,10 +208,8 @@
RELEASE_ASSERT(accessType == stubInfo->accessType);
- if (stubInfo->seen)
+ if (stubInfo->considerCaching())
repatchIn(exec, base, ident, result, slot, *stubInfo);
- else
- stubInfo->seen = true;
return JSValue::encode(jsBoolean(result));
}
@@ -308,10 +304,8 @@
if (accessType != static_cast<AccessType>(stubInfo->accessType))
return;
- if (stubInfo->seen)
+ if (stubInfo->considerCaching())
repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
- else
- stubInfo->seen = true;
}
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
@@ -332,10 +326,8 @@
if (accessType != static_cast<AccessType>(stubInfo->accessType))
return;
- if (stubInfo->seen)
+ if (stubInfo->considerCaching())
repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
- else
- stubInfo->seen = true;
}
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
@@ -356,10 +348,8 @@
if (accessType != static_cast<AccessType>(stubInfo->accessType))
return;
- if (stubInfo->seen)
+ if (stubInfo->considerCaching())
repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
- else
- stubInfo->seen = true;
}
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
@@ -380,10 +370,8 @@
if (accessType != static_cast<AccessType>(stubInfo->accessType))
return;
- if (stubInfo->seen)
+ if (stubInfo->considerCaching())
repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
- else
- stubInfo->seen = true;
}
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
Modified: trunk/Source/JavaScriptCore/jit/Repatch.cpp (190560 => 190561)
--- trunk/Source/JavaScriptCore/jit/Repatch.cpp 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/jit/Repatch.cpp 2015-10-05 17:05:24 UTC (rev 190561)
@@ -266,7 +266,7 @@
&& !loadTargetFromProxy) {
structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdOptimize, true);
- stubInfo.initGetByIdSelf(vm, codeBlock->ownerExecutable(), structure, slot.cachedOffset());
+ stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
return RetryCacheLater;
}
@@ -308,8 +308,8 @@
slot.isCacheableCustom() ? slot.slotBase() : nullptr);
}
- MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
- vm, codeBlock, propertyName, WTF::move(newCase));
+ MacroAssemblerCodePtr codePtr =
+ stubInfo.addAccessCase(codeBlock, propertyName, WTF::move(newCase));
if (!codePtr)
return GiveUpOnCache;
@@ -384,8 +384,7 @@
repatchByIdSelfAccess(
codeBlock, stubInfo, structure, slot.cachedOffset(),
appropriateOptimizingPutByIdFunction(slot, putKind), false);
- stubInfo.initPutByIdReplace(
- vm, codeBlock->ownerExecutable(), structure, slot.cachedOffset());
+ stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
return RetryCacheLater;
}
@@ -452,8 +451,7 @@
}
}
- MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
- vm, codeBlock, ident, WTF::move(newCase));
+ MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(codeBlock, ident, WTF::move(newCase));
if (!codePtr)
return GiveUpOnCache;
@@ -511,7 +509,7 @@
std::unique_ptr<AccessCase> newCase = AccessCase::in(
vm, owner, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);
- MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(vm, codeBlock, ident, WTF::move(newCase));
+ MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(codeBlock, ident, WTF::move(newCase));
if (!codePtr)
return GiveUpOnCache;
Modified: trunk/Source/JavaScriptCore/runtime/Options.h (190560 => 190561)
--- trunk/Source/JavaScriptCore/runtime/Options.h 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/JavaScriptCore/runtime/Options.h 2015-10-05 17:05:24 UTC (rev 190561)
@@ -121,6 +121,9 @@
v(bool, forceCodeBlockLiveness, false, nullptr) \
v(bool, forceICFailure, false, nullptr) \
\
+ v(unsigned, repatchCountForCoolDown, 10, nullptr) \
+ v(unsigned, initialCoolDownCount, 20, nullptr) \
+ \
v(bool, dumpGeneratedBytecodes, false, nullptr) \
v(bool, dumpBytecodeLivenessResults, false, nullptr) \
v(bool, validateBytecode, false, nullptr) \
Modified: trunk/Source/WTF/ChangeLog (190560 => 190561)
--- trunk/Source/WTF/ChangeLog 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/WTF/ChangeLog 2015-10-05 17:05:24 UTC (rev 190561)
@@ -1,3 +1,17 @@
+2015-10-04 Filip Pizlo <[email protected]>
+
+ Inline cache repatching should be throttled if it happens a lot
+ https://bugs.webkit.org/show_bug.cgi?id=149796
+ rdar://problem/22674436
+
+ Reviewed by Saam Barati.
+
+ Add some helpers for saturated math.
+
+ * wtf/MathExtras.h:
+ (WTF::incrementWithSaturation):
+ (WTF::leftShiftWithSaturation):
+
2015-10-01 Brent Fulgham <[email protected]>
[Win] Unreviewed CMake build fixes.
Modified: trunk/Source/WTF/wtf/MathExtras.h (190560 => 190561)
--- trunk/Source/WTF/wtf/MathExtras.h 2015-10-05 15:12:27 UTC (rev 190560)
+++ trunk/Source/WTF/wtf/MathExtras.h 2015-10-05 17:05:24 UTC (rev 190561)
@@ -400,6 +400,25 @@
return static_cast<int>(value) == value;
}
+template<typename T>
+inline void incrementWithSaturation(T& value)
+{
+ if (value != std::numeric_limits<T>::max())
+ value++;
+}
+
+template<typename T>
+inline T leftShiftWithSaturation(T value, unsigned shiftAmount, T max = std::numeric_limits<T>::max())
+{
+ T result = value << shiftAmount;
+ // We will have saturated if shifting right doesn't recover the original value.
+ if (result >> shiftAmount != value)
+ return max;
+ if (result > max)
+ return max;
+ return result;
+}
+
} // namespace WTF
#endif // #ifndef WTF_MathExtras_h