Diff
Modified: trunk/Source/_javascript_Core/ChangeLog (217710 => 217711)
--- trunk/Source/_javascript_Core/ChangeLog 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/ChangeLog 2017-06-02 15:59:22 UTC (rev 217711)
@@ -1,3 +1,110 @@
+2017-06-01 Filip Pizlo <[email protected]>
+
+ GC should use scrambled free-lists
+ https://bugs.webkit.org/show_bug.cgi?id=172793
+
+ Reviewed by Mark Lam.
+
+ Previously, our bump'n'pop allocator would use a conventional linked-list for the free-list.
+ The linked-list would be threaded through free memory, as is the usual convention.
+
+ This scrambles the next pointers of that free-list. It also scrambles the head pointer, because
+ this leads to a more natural fast-path structure and saves one register on ARM64.
+
+ The secret with which pointers are scrambled is per-allocator. Allocators choose a new secret
+ every time they do a sweep-to-pop.
+
+ This doesn't change the behavior of the bump part of bump'n'pop, but it does refactor the code
+ quite a bit. Previously, there were four copies of the allocator fast path: two in
+ MarkedAllocatorInlines.h, one in MarkedAllocator.cpp, and one in AssemblyHelpers.h. The JIT one
+ was obviously different-looking, but the other three were almost identical. This moves all of
+ that logic into FreeList. There are now just two copies of the allocator fast path: FreeListInlines.h
+ and AssemblyHelpers.h.
+
+ This appears to be just as fast as our previous allocator.
+
+ * _javascript_Core.xcodeproj/project.pbxproj:
+ * heap/FreeList.cpp:
+ (JSC::FreeList::FreeList):
+ (JSC::FreeList::~FreeList):
+ (JSC::FreeList::clear):
+ (JSC::FreeList::initializeList):
+ (JSC::FreeList::initializeBump):
+ (JSC::FreeList::contains):
+ (JSC::FreeList::dump):
+ * heap/FreeList.h:
+ (JSC::FreeList::allocationWillFail):
+ (JSC::FreeList::originalSize):
+ (JSC::FreeList::addressOfList):
+ (JSC::FreeList::offsetOfBlock):
+ (JSC::FreeList::offsetOfList):
+ (JSC::FreeList::offsetOfIndex):
+ (JSC::FreeList::offsetOfPayloadEnd):
+ (JSC::FreeList::offsetOfRemaining):
+ (JSC::FreeList::offsetOfOriginalSize):
+ (JSC::FreeList::FreeList): Deleted.
+ (JSC::FreeList::list): Deleted.
+ (JSC::FreeList::bump): Deleted.
+ (JSC::FreeList::operator==): Deleted.
+ (JSC::FreeList::operator!=): Deleted.
+ (JSC::FreeList::operator bool): Deleted.
+ * heap/FreeListInlines.h: Added.
+ (JSC::FreeList::addFreeCell):
+ (JSC::FreeList::allocate):
+ (JSC::FreeList::forEach):
+ (JSC::FreeList::toOffset):
+ (JSC::FreeList::fromOffset):
+ * heap/IncrementalSweeper.cpp:
+ (JSC::IncrementalSweeper::sweepNextBlock):
+ * heap/MarkedAllocator.cpp:
+ (JSC::MarkedAllocator::MarkedAllocator):
+ (JSC::MarkedAllocator::didConsumeFreeList):
+ (JSC::MarkedAllocator::tryAllocateWithoutCollecting):
+ (JSC::MarkedAllocator::tryAllocateIn):
+ (JSC::MarkedAllocator::allocateSlowCaseImpl):
+ (JSC::MarkedAllocator::stopAllocating):
+ (JSC::MarkedAllocator::prepareForAllocation):
+ (JSC::MarkedAllocator::resumeAllocating):
+ (JSC::MarkedAllocator::sweep):
+ (JSC::MarkedAllocator::setFreeList): Deleted.
+ * heap/MarkedAllocator.h:
+ (JSC::MarkedAllocator::freeList):
+ (JSC::MarkedAllocator::isFreeListedCell): Deleted.
+ * heap/MarkedAllocatorInlines.h:
+ (JSC::MarkedAllocator::isFreeListedCell):
+ (JSC::MarkedAllocator::tryAllocate):
+ (JSC::MarkedAllocator::allocate):
+ * heap/MarkedBlock.cpp:
+ (JSC::MarkedBlock::Handle::stopAllocating):
+ (JSC::MarkedBlock::Handle::lastChanceToFinalize):
+ (JSC::MarkedBlock::Handle::resumeAllocating):
+ (JSC::MarkedBlock::Handle::zap):
+ (JSC::MarkedBlock::Handle::sweep):
+ (JSC::MarkedBlock::Handle::isFreeListedCell):
+ (JSC::MarkedBlock::Handle::forEachFreeCell): Deleted.
+ * heap/MarkedBlock.h:
+ * heap/MarkedBlockInlines.h:
+ (JSC::MarkedBlock::Handle::specializedSweep):
+ (JSC::MarkedBlock::Handle::finishSweepKnowingSubspace):
+ (JSC::MarkedBlock::Handle::isFreeListedCell): Deleted.
+ * heap/Subspace.cpp:
+ (JSC::Subspace::finishSweep):
+ * heap/Subspace.h:
+ * jit/AssemblyHelpers.h:
+ (JSC::AssemblyHelpers::emitAllocateWithNonNullAllocator):
+ * runtime/JSDestructibleObjectSubspace.cpp:
+ (JSC::JSDestructibleObjectSubspace::finishSweep):
+ * runtime/JSDestructibleObjectSubspace.h:
+ * runtime/JSSegmentedVariableObjectSubspace.cpp:
+ (JSC::JSSegmentedVariableObjectSubspace::finishSweep):
+ * runtime/JSSegmentedVariableObjectSubspace.h:
+ * runtime/JSStringSubspace.cpp:
+ (JSC::JSStringSubspace::finishSweep):
+ * runtime/JSStringSubspace.h:
+ * wasm/js/JSWebAssemblyCodeBlockSubspace.cpp:
+ (JSC::JSWebAssemblyCodeBlockSubspace::finishSweep):
+ * wasm/js/JSWebAssemblyCodeBlockSubspace.h:
+
2017-06-02 Yusuke Suzuki <[email protected]>
[JSC] Use @globalPrivate for concatSlowPath
Modified: trunk/Source/_javascript_Core/_javascript_Core.xcodeproj/project.pbxproj (217710 => 217711)
--- trunk/Source/_javascript_Core/_javascript_Core.xcodeproj/project.pbxproj 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/_javascript_Core.xcodeproj/project.pbxproj 2017-06-02 15:59:22 UTC (rev 217711)
@@ -501,6 +501,7 @@
0F64B2791A7957B2006E4E66 /* CallEdge.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F64B2771A7957B2006E4E66 /* CallEdge.cpp */; };
0F64B27A1A7957B2006E4E66 /* CallEdge.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F64B2781A7957B2006E4E66 /* CallEdge.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F64EAF31C4ECD0600621E9B /* AirArgInlines.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F64EAF21C4ECD0600621E9B /* AirArgInlines.h */; };
+ 0F6585E11EE0805A0095176D /* FreeListInlines.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F6585E01EE080570095176D /* FreeListInlines.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F660E371E0517B90031462C /* MarkingConstraint.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F660E331E0517B70031462C /* MarkingConstraint.cpp */; };
0F660E381E0517BB0031462C /* MarkingConstraint.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F660E341E0517B70031462C /* MarkingConstraint.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F660E391E0517BF0031462C /* MarkingConstraintSet.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F660E351E0517B70031462C /* MarkingConstraintSet.cpp */; };
@@ -3077,6 +3078,7 @@
0F64B2771A7957B2006E4E66 /* CallEdge.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = CallEdge.cpp; sourceTree = "<group>"; };
0F64B2781A7957B2006E4E66 /* CallEdge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CallEdge.h; sourceTree = "<group>"; };
0F64EAF21C4ECD0600621E9B /* AirArgInlines.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = AirArgInlines.h; path = b3/air/AirArgInlines.h; sourceTree = "<group>"; };
+ 0F6585E01EE080570095176D /* FreeListInlines.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FreeListInlines.h; sourceTree = "<group>"; };
0F660E331E0517B70031462C /* MarkingConstraint.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = MarkingConstraint.cpp; sourceTree = "<group>"; };
0F660E341E0517B70031462C /* MarkingConstraint.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MarkingConstraint.h; sourceTree = "<group>"; };
0F660E351E0517B70031462C /* MarkingConstraintSet.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = MarkingConstraintSet.cpp; sourceTree = "<group>"; };
@@ -6019,6 +6021,7 @@
2A83638418D7D0EE0000EBCC /* EdenGCActivityCallback.h */,
0F5513A71D5A68CB00C32BD8 /* FreeList.cpp */,
0F5513A51D5A682A00C32BD8 /* FreeList.h */,
+ 0F6585E01EE080570095176D /* FreeListInlines.h */,
2A83638718D7D0FE0000EBCC /* FullGCActivityCallback.cpp */,
2A83638818D7D0FE0000EBCC /* FullGCActivityCallback.h */,
2AACE63A18CA5A0300ED0191 /* GCActivityCallback.cpp */,
@@ -8661,6 +8664,7 @@
0F235BEE17178E7300690C7F /* DFGOSRExitPreparation.h in Headers */,
0F2C63B71E6343ED00C13839 /* B3AtomicValue.h in Headers */,
0F6237981AE45CA700D402EA /* DFGPhantomInsertionPhase.h in Headers */,
+ 0F6585E11EE0805A0095176D /* FreeListInlines.h in Headers */,
0FFFC95C14EF90AF00C72532 /* DFGPhase.h in Headers */,
37C738D21EDB56E4003F2B0B /* ParseInt.h in Headers */,
0F2B9CEB19D0BA7D00B1D1B5 /* DFGPhiChildren.h in Headers */,
Modified: trunk/Source/_javascript_Core/assembler/MacroAssembler.h (217710 => 217711)
--- trunk/Source/_javascript_Core/assembler/MacroAssembler.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/assembler/MacroAssembler.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -643,6 +643,10 @@
xor32(imm, srcDest);
}
+ void xorPtr(Address src, RegisterID dest)
+ {
+ xor32(src, dest);
+ }
void loadPtr(ImplicitAddress address, RegisterID dest)
{
@@ -953,6 +957,11 @@
xor64(src, dest);
}
+ void xorPtr(Address src, RegisterID dest)
+ {
+ xor64(src, dest);
+ }
+
void xorPtr(RegisterID src, Address dest)
{
xor64(src, dest);
Modified: trunk/Source/_javascript_Core/assembler/MacroAssemblerARM64.h (217710 => 217711)
--- trunk/Source/_javascript_Core/assembler/MacroAssemblerARM64.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/assembler/MacroAssemblerARM64.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -909,6 +909,12 @@
xor32(dest, src, dest);
}
+ void xor32(Address src, RegisterID dest)
+ {
+ load32(src, getCachedDataTemptRegisterIDAndInvalidate());
+ xor32(dataTempRegister, dest);
+ }
+
void xor32(RegisterID op1, RegisterID op2, RegisterID dest)
{
m_assembler.eor<32>(dest, op1, op2);
@@ -991,6 +997,12 @@
m_assembler.eor<64>(dest, src, dataTempRegister);
}
}
+
+ void xor64(Address src, RegisterID dest)
+ {
+ load64(src, getCachedDataTemptRegisterIDAndInvalidate());
+ xor64(dataTempRegister, dest);
+ }
void not32(RegisterID src, RegisterID dest)
{
Modified: trunk/Source/_javascript_Core/assembler/MacroAssemblerARMv7.h (217710 => 217711)
--- trunk/Source/_javascript_Core/assembler/MacroAssemblerARMv7.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/assembler/MacroAssemblerARMv7.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -543,6 +543,12 @@
xor32(dest, src, dest);
}
+ void xor32(Address src, RegisterID dest)
+ {
+ load32(src, dataTempRegister);
+ xor32(dataTempRegister, dest);
+ }
+
void xor32(TrustedImm32 imm, RegisterID dest)
{
if (imm.m_value == -1)
Modified: trunk/Source/_javascript_Core/heap/FreeList.cpp (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/FreeList.cpp 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/FreeList.cpp 2017-06-02 15:59:22 UTC (rev 217711)
@@ -26,21 +26,62 @@
#include "config.h"
#include "FreeList.h"
+#include "FreeListInlines.h"
+#include "MarkedBlock.h"
+#include <wtf/CommaPrinter.h>
+
namespace JSC {
-bool FreeList::contains(const void* target) const
+FreeList::FreeList(unsigned cellSize)
+ : m_cellSize(cellSize)
{
- if (remaining) {
- const void* start = (payloadEnd - remaining);
- const void* end = payloadEnd;
+}
+
+FreeList::~FreeList()
+{
+}
+
+void FreeList::clear()
+{
+ m_scrambledHead = 0;
+ m_secret = 0;
+ m_payloadEnd = nullptr;
+ m_remaining = 0;
+ m_originalSize = 0;
+}
+
+void FreeList::initializeList(FreeCell* head, uintptr_t secret, unsigned bytes)
+{
+ // It's *slightly* more optimal to use a scrambled head. It saves a register on the fast path.
+ m_scrambledHead = FreeCell::scramble(head, secret);
+ m_secret = secret;
+ m_payloadEnd = nullptr;
+ m_remaining = 0;
+ m_originalSize = bytes;
+}
+
+void FreeList::initializeBump(char* payloadEnd, unsigned remaining)
+{
+ m_scrambledHead = 0;
+ m_secret = 0;
+ m_payloadEnd = payloadEnd;
+ m_remaining = remaining;
+ m_originalSize = remaining;
+}
+
+bool FreeList::contains(HeapCell* target) const
+{
+ if (m_remaining) {
+ const void* start = (m_payloadEnd - m_remaining);
+ const void* end = m_payloadEnd;
return (start <= target) && (target < end);
}
- FreeCell* candidate = head;
+ FreeCell* candidate = head();
while (candidate) {
- if (candidate == target)
+ if (bitwise_cast<HeapCell*>(candidate) == target)
return true;
- candidate = candidate->next;
+ candidate = candidate->next(m_secret);
}
return false;
@@ -48,7 +89,7 @@
void FreeList::dump(PrintStream& out) const
{
- out.print("{head = ", RawPointer(head), ", payloadEnd = ", RawPointer(payloadEnd), ", remaining = ", remaining, ", originalSize = ", originalSize, "}");
+ out.print("{head = ", RawPointer(head()), ", secret = ", m_secret, ", payloadEnd = ", RawPointer(m_payloadEnd), ", remaining = ", m_remaining, ", originalSize = ", m_originalSize, "}");
}
} // namespace JSC
Modified: trunk/Source/_javascript_Core/heap/FreeList.h (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/FreeList.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/FreeList.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -25,68 +25,79 @@
#pragma once
+#include <wtf/Noncopyable.h>
#include <wtf/PrintStream.h>
namespace JSC {
+class HeapCell;
+
struct FreeCell {
- FreeCell* next;
-};
-
-// This representation of a FreeList is convenient for the MarkedAllocator.
-
-struct FreeList {
- FreeCell* head { nullptr };
- char* payloadEnd { nullptr };
- unsigned remaining { 0 };
- unsigned originalSize { 0 };
-
- FreeList()
+ static uintptr_t scramble(FreeCell* cell, uintptr_t secret)
{
+ return bitwise_cast<uintptr_t>(cell) ^ secret;
}
- static FreeList list(FreeCell* head, unsigned bytes)
+ static FreeCell* descramble(uintptr_t cell, uintptr_t secret)
{
- FreeList result;
- result.head = head;
- result.remaining = 0;
- result.originalSize = bytes;
- return result;
+ return bitwise_cast<FreeCell*>(cell ^ secret);
}
- static FreeList bump(char* payloadEnd, unsigned remaining)
+ void setNext(FreeCell* next, uintptr_t secret)
{
- FreeList result;
- result.payloadEnd = payloadEnd;
- result.remaining = remaining;
- result.originalSize = remaining;
- return result;
+ scrambledNext = scramble(next, secret);
}
- bool operator==(const FreeList& other) const
+ FreeCell* next(uintptr_t secret) const
{
- return head == other.head
- && payloadEnd == other.payloadEnd
- && remaining == other.remaining
- && originalSize == other.originalSize;
+ return descramble(scrambledNext, secret);
}
- bool operator!=(const FreeList& other) const
- {
- return !(*this == other);
- }
+ uintptr_t scrambledNext;
+};
+
+class FreeList {
+ WTF_MAKE_NONCOPYABLE(FreeList);
- explicit operator bool() const
- {
- return *this != FreeList();
- }
-
- bool contains(const void* target) const;
-
- bool allocationWillFail() const { return !head && !remaining; }
+public:
+ FreeList(unsigned cellSize);
+ ~FreeList();
+
+ void clear();
+
+ void initializeList(FreeCell* head, uintptr_t secret, unsigned bytes);
+ void initializeBump(char* payloadEnd, unsigned remaining);
+
+ bool allocationWillFail() const { return !head() && !m_remaining; }
bool allocationWillSucceed() const { return !allocationWillFail(); }
+ template<typename Func>
+ HeapCell* allocate(const Func& slowPath);
+
+ bool contains(HeapCell*) const;
+
+ template<typename Func>
+ void forEach(const Func&) const;
+
+ unsigned originalSize() const { return m_originalSize; }
+
+ static ptrdiff_t offsetOfScrambledHead() { return OBJECT_OFFSETOF(FreeList, m_scrambledHead); }
+ static ptrdiff_t offsetOfSecret() { return OBJECT_OFFSETOF(FreeList, m_secret); }
+ static ptrdiff_t offsetOfPayloadEnd() { return OBJECT_OFFSETOF(FreeList, m_payloadEnd); }
+ static ptrdiff_t offsetOfRemaining() { return OBJECT_OFFSETOF(FreeList, m_remaining); }
+ static ptrdiff_t offsetOfOriginalSize() { return OBJECT_OFFSETOF(FreeList, m_originalSize); }
+
void dump(PrintStream&) const;
+
+private:
+ FreeCell* head() const { return FreeCell::descramble(m_scrambledHead, m_secret); }
+
+ uintptr_t m_scrambledHead { 0 };
+ uintptr_t m_secret { 0 };
+ char* m_payloadEnd { nullptr };
+ unsigned m_remaining { 0 };
+ unsigned m_originalSize { 0 };
+ unsigned m_cellSize { 0 };
};
} // namespace JSC
Added: trunk/Source/_javascript_Core/heap/FreeListInlines.h (0 => 217711)
--- trunk/Source/_javascript_Core/heap/FreeListInlines.h (rev 0)
+++ trunk/Source/_javascript_Core/heap/FreeListInlines.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#include "FreeList.h"
+#include "MarkedBlock.h"
+
+namespace JSC {
+
+template<typename Func>
+HeapCell* FreeList::allocate(const Func& slowPath)
+{
+ unsigned remaining = m_remaining;
+ if (remaining) {
+ unsigned cellSize = m_cellSize;
+ remaining -= cellSize;
+ m_remaining = remaining;
+ return bitwise_cast<HeapCell*>(m_payloadEnd - remaining - cellSize);
+ }
+
+ FreeCell* result = head();
+ if (UNLIKELY(!result))
+ return slowPath();
+
+ m_scrambledHead = result->scrambledNext;
+ return bitwise_cast<HeapCell*>(result);
+}
+
+template<typename Func>
+void FreeList::forEach(const Func& func) const
+{
+ if (m_remaining) {
+ for (unsigned remaining = m_remaining; remaining; remaining -= m_cellSize)
+ func(bitwise_cast<HeapCell*>(m_payloadEnd - remaining));
+ } else {
+ for (FreeCell* cell = head(); cell;) {
+ // We can use this to overwrite free objects before destroying the free list. So, we need
+ // to get next before proceeding further.
+ FreeCell* next = cell->next(m_secret);
+ func(bitwise_cast<HeapCell*>(cell));
+ cell = next;
+ }
+ }
+}
+
+} // namespace JSC
+
Modified: trunk/Source/_javascript_Core/heap/IncrementalSweeper.cpp (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/IncrementalSweeper.cpp 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/IncrementalSweeper.cpp 2017-06-02 15:59:22 UTC (rev 217711)
@@ -87,7 +87,7 @@
if (block) {
DeferGCForAWhile deferGC(m_vm->heap);
- block->sweep();
+ block->sweep(nullptr);
m_vm->heap.objectSpace().freeOrShrinkBlock(block);
return true;
}
Modified: trunk/Source/_javascript_Core/heap/MarkedAllocator.cpp (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/MarkedAllocator.cpp 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/MarkedAllocator.cpp 2017-06-02 15:59:22 UTC (rev 217711)
@@ -40,7 +40,8 @@
namespace JSC {
MarkedAllocator::MarkedAllocator(Heap* heap, Subspace* subspace, size_t cellSize)
- : m_currentBlock(0)
+ : m_freeList(cellSize)
+ , m_currentBlock(0)
, m_lastActiveBlock(0)
, m_cellSize(static_cast<unsigned>(cellSize))
, m_attributes(subspace->attributes())
@@ -88,7 +89,7 @@
if (m_currentBlock)
m_currentBlock->didConsumeFreeList();
- setFreeList(FreeList());
+ m_freeList.clear();
m_currentBlock = nullptr;
}
@@ -97,7 +98,7 @@
SuperSamplerScope superSamplerScope(false);
ASSERT(!m_currentBlock);
- ASSERT(!m_freeList);
+ ASSERT(m_freeList.allocationWillFail());
for (;;) {
m_allocationCursor = (m_canAllocateButNotEmpty | m_empty).findBit(m_allocationCursor, true);
@@ -113,7 +114,7 @@
if (Options::stealEmptyBlocksFromOtherAllocators()
&& shouldStealEmptyBlocksFromOtherAllocators()) {
if (MarkedBlock::Handle* block = markedSpace().findEmptyBlockToSteal()) {
- block->sweep();
+ block->sweep(nullptr);
// It's good that this clears canAllocateButNotEmpty as well as all other bits,
// because there is a remote chance that a block may have both canAllocateButNotEmpty
@@ -139,11 +140,11 @@
ASSERT(block);
ASSERT(!block->isFreeListed());
- FreeList freeList = block->sweep(MarkedBlock::Handle::SweepToFreeList);
+ block->sweep(&m_freeList);
// It's possible to stumble on a completely full block. Marking tries to retire these, but
// that algorithm is racy and may forget to do it sometimes.
- if (freeList.allocationWillFail()) {
+ if (m_freeList.allocationWillFail()) {
ASSERT(block->isFreeListed());
block->unsweepWithNoNewlyAllocated();
ASSERT(!block->isFreeListed());
@@ -153,19 +154,9 @@
}
m_currentBlock = block;
- setFreeList(freeList);
- void* result;
- if (m_freeList.remaining) {
- unsigned cellSize = m_cellSize;
- m_freeList.remaining -= cellSize;
- result = m_freeList.payloadEnd - m_freeList.remaining - cellSize;
- } else {
- FreeCell* head = m_freeList.head;
- m_freeList.head = head->next;
- result = head;
- }
- RELEASE_ASSERT(result);
+ void* result = m_freeList.allocate(
+ [] () -> HeapCell* { RELEASE_ASSERT_NOT_REACHED(); return nullptr; });
setIsEden(NoLockingNecessary, m_currentBlock, true);
markedSpace().didAllocateInBlock(m_currentBlock);
return result;
@@ -208,7 +199,7 @@
doTestCollectionsIfNeeded(deferralContext);
ASSERT(!markedSpace().isIterating());
- m_heap->didAllocate(m_freeList.originalSize);
+ m_heap->didAllocate(m_freeList.originalSize());
didConsumeFreeList();
@@ -334,7 +325,7 @@
dataLog(RawPointer(this), ": MarkedAllocator::stopAllocating!\n");
ASSERT(!m_lastActiveBlock);
if (!m_currentBlock) {
- ASSERT(!m_freeList);
+ ASSERT(m_freeList.allocationWillFail());
return;
}
@@ -341,7 +332,7 @@
m_currentBlock->stopAllocating(m_freeList);
m_lastActiveBlock = m_currentBlock;
m_currentBlock = 0;
- m_freeList = FreeList();
+ m_freeList.clear();
}
void MarkedAllocator::prepareForAllocation()
@@ -348,7 +339,7 @@
{
m_lastActiveBlock = nullptr;
m_currentBlock = nullptr;
- setFreeList(FreeList());
+ m_freeList.clear();
m_allocationCursor = 0;
m_emptyCursor = 0;
@@ -371,17 +362,12 @@
});
}
-void MarkedAllocator::setFreeList(const FreeList& freeList)
-{
- m_freeList = freeList;
-}
-
void MarkedAllocator::resumeAllocating()
{
if (!m_lastActiveBlock)
return;
- m_freeList = m_lastActiveBlock->resumeAllocating();
+ m_lastActiveBlock->resumeAllocating(m_freeList);
m_currentBlock = m_lastActiveBlock;
m_lastActiveBlock = nullptr;
}
@@ -445,7 +431,7 @@
m_unswept.forEachSetBit(
[&] (size_t index) {
MarkedBlock::Handle* block = m_blocks[index];
- block->sweep();
+ block->sweep(nullptr);
});
}
Modified: trunk/Source/_javascript_Core/heap/MarkedAllocator.h (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/MarkedAllocator.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/MarkedAllocator.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -156,7 +156,7 @@
void* tryAllocate(GCDeferralContext* = nullptr);
Heap* heap() { return m_heap; }
- bool isFreeListedCell(const void* target) const { return m_freeList.contains(target); }
+ bool isFreeListedCell(const void* target) const;
template<typename Functor> void forEachBlock(const Functor&);
template<typename Functor> void forEachNotEmptyBlock(const Functor&);
@@ -209,6 +209,8 @@
Subspace* subspace() const { return m_subspace; }
MarkedSpace& markedSpace() const;
+ const FreeList& freeList() const { return m_freeList; }
+
void dump(PrintStream&) const;
void dumpBits(PrintStream& = WTF::dataFile());
@@ -227,8 +229,6 @@
void* allocateIn(MarkedBlock::Handle*);
ALWAYS_INLINE void doTestCollectionsIfNeeded(GCDeferralContext*);
- void setFreeList(const FreeList&);
-
FreeList m_freeList;
Vector<MarkedBlock::Handle*> m_blocks;
Modified: trunk/Source/_javascript_Core/heap/MarkedAllocatorInlines.h (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/MarkedAllocatorInlines.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/MarkedAllocatorInlines.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -25,44 +25,30 @@
#pragma once
+#include "FreeListInlines.h"
#include "MarkedAllocator.h"
namespace JSC {
+inline bool MarkedAllocator::isFreeListedCell(const void* target) const
+{
+ return m_freeList.contains(bitwise_cast<HeapCell*>(target));
+}
+
ALWAYS_INLINE void* MarkedAllocator::tryAllocate(GCDeferralContext* deferralContext)
{
- unsigned remaining = m_freeList.remaining;
- if (remaining) {
- unsigned cellSize = m_cellSize;
- remaining -= cellSize;
- m_freeList.remaining = remaining;
- return m_freeList.payloadEnd - remaining - cellSize;
- }
-
- FreeCell* head = m_freeList.head;
- if (UNLIKELY(!head))
- return tryAllocateSlowCase(deferralContext);
-
- m_freeList.head = head->next;
- return head;
+ return m_freeList.allocate(
+ [&] () -> HeapCell* {
+ return static_cast<HeapCell*>(tryAllocateSlowCase(deferralContext));
+ });
}
ALWAYS_INLINE void* MarkedAllocator::allocate(GCDeferralContext* deferralContext)
{
- unsigned remaining = m_freeList.remaining;
- if (remaining) {
- unsigned cellSize = m_cellSize;
- remaining -= cellSize;
- m_freeList.remaining = remaining;
- return m_freeList.payloadEnd - remaining - cellSize;
- }
-
- FreeCell* head = m_freeList.head;
- if (UNLIKELY(!head))
- return allocateSlowCase(deferralContext);
-
- m_freeList.head = head->next;
- return head;
+ return m_freeList.allocate(
+ [&] () -> HeapCell* {
+ return static_cast<HeapCell*>(allocateSlowCase(deferralContext));
+ });
}
template <typename Functor> inline void MarkedAllocator::forEachBlock(const Functor& functor)
Modified: trunk/Source/_javascript_Core/heap/MarkedBlock.cpp (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/MarkedBlock.cpp 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/MarkedBlock.cpp 2017-06-02 15:59:22 UTC (rev 217711)
@@ -26,9 +26,11 @@
#include "config.h"
#include "MarkedBlock.h"
+#include "FreeListInlines.h"
#include "JSCell.h"
#include "JSDestructibleObject.h"
#include "JSCInlines.h"
+#include "MarkedAllocatorInlines.h"
#include "MarkedBlockInlines.h"
#include "SuperSampler.h"
#include "SweepingScope.h"
@@ -134,8 +136,7 @@
return IterationStatus::Continue;
});
- forEachFreeCell(
- freeList,
+ freeList.forEach(
[&] (HeapCell* cell) {
if (false)
dataLog("Free cell: ", RawPointer(cell), "\n");
@@ -156,10 +157,10 @@
m_weakSet.lastChanceToFinalize();
m_newlyAllocated.clearAll();
m_newlyAllocatedVersion = heap()->objectSpace().newlyAllocatedVersion();
- sweep();
+ sweep(nullptr);
}
-FreeList MarkedBlock::Handle::resumeAllocating()
+void MarkedBlock::Handle::resumeAllocating(FreeList& freeList)
{
{
auto locker = holdLock(block().m_lock);
@@ -173,19 +174,19 @@
if (false)
dataLog("There ain't no newly allocated.\n");
// This means we had already exhausted the block when we stopped allocation.
- return FreeList();
+ freeList.clear();
+ return;
}
}
// Re-create our free list from before stopping allocation. Note that this may return an empty
// freelist, in which case the block will still be Marked!
- return sweep(SweepToFreeList);
+ sweep(&freeList);
}
void MarkedBlock::Handle::zap(const FreeList& freeList)
{
- forEachFreeCell(
- freeList,
+ freeList.forEach(
[&] (HeapCell* cell) {
if (m_attributes.destruction == NeedsDestruction)
cell->zap();
@@ -192,21 +193,6 @@
});
}
-template<typename Func>
-void MarkedBlock::Handle::forEachFreeCell(const FreeList& freeList, const Func& func)
-{
- if (freeList.remaining) {
- for (unsigned remaining = freeList.remaining; remaining; remaining -= cellSize())
- func(bitwise_cast<HeapCell*>(freeList.payloadEnd - remaining));
- } else {
- for (FreeCell* current = freeList.head; current;) {
- FreeCell* next = current->next;
- func(bitwise_cast<HeapCell*>(current));
- current = next;
- }
- }
-}
-
void MarkedBlock::aboutToMarkSlow(HeapVersion markingVersion)
{
ASSERT(vm()->heap.objectSpace().isMarking());
@@ -407,20 +393,22 @@
return allocator()->subspace();
}
-FreeList MarkedBlock::Handle::sweep(SweepMode sweepMode)
+void MarkedBlock::Handle::sweep(FreeList* freeList)
{
SweepingScope sweepingScope(*heap());
+ SweepMode sweepMode = freeList ? SweepToFreeList : SweepOnly;
+
m_allocator->setIsUnswept(NoLockingNecessary, this, false);
m_weakSet.sweep();
if (sweepMode == SweepOnly && m_attributes.destruction == DoesNotNeedDestruction)
- return FreeList();
+ return;
if (UNLIKELY(m_isFreeListed)) {
RELEASE_ASSERT(sweepMode == SweepToFreeList);
- return FreeList();
+ return;
}
ASSERT(!m_allocator->isAllocated(NoLockingNecessary, this));
@@ -428,8 +416,10 @@
if (space()->isMarking())
block().m_lock.lock();
- if (m_attributes.destruction == NeedsDestruction)
- return subspace()->finishSweep(*this, sweepMode);
+ if (m_attributes.destruction == NeedsDestruction) {
+ subspace()->finishSweep(*this, freeList);
+ return;
+ }
// Handle the no-destructor specializations here, since we have the most of those. This
// ensures that they don't get re-specialized for every destructor space.
@@ -439,7 +429,6 @@
NewlyAllocatedMode newlyAllocatedMode = this->newlyAllocatedMode();
MarksMode marksMode = this->marksMode();
- FreeList result;
auto trySpecialized = [&] () -> bool {
if (sweepMode != SweepToFreeList)
return false;
@@ -452,10 +441,10 @@
case IsEmpty:
switch (marksMode) {
case MarksNotStale:
- result = specializedSweep<true, IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale>(IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale, [] (VM&, JSCell*) { });
+ specializedSweep<true, IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale>(freeList, IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale, [] (VM&, JSCell*) { });
return true;
case MarksStale:
- result = specializedSweep<true, IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale>(IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale, [] (VM&, JSCell*) { });
+ specializedSweep<true, IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale>(freeList, IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale, [] (VM&, JSCell*) { });
return true;
}
break;
@@ -462,10 +451,10 @@
case NotEmpty:
switch (marksMode) {
case MarksNotStale:
- result = specializedSweep<true, NotEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale>(IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale, [] (VM&, JSCell*) { });
+ specializedSweep<true, NotEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale>(freeList, IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale, [] (VM&, JSCell*) { });
return true;
case MarksStale:
- result = specializedSweep<true, NotEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale>(IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale, [] (VM&, JSCell*) { });
+ specializedSweep<true, NotEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale>(freeList, IsEmpty, SweepToFreeList, BlockHasNoDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale, [] (VM&, JSCell*) { });
return true;
}
break;
@@ -475,12 +464,18 @@
};
if (trySpecialized())
- return result;
+ return;
// The template arguments don't matter because the first one is false.
- return specializedSweep<false, IsEmpty, SweepOnly, BlockHasNoDestructors, DontScribble, HasNewlyAllocated, MarksStale>(emptyMode, sweepMode, BlockHasNoDestructors, scribbleMode, newlyAllocatedMode, marksMode, [] (VM&, JSCell*) { });
+ specializedSweep<false, IsEmpty, SweepOnly, BlockHasNoDestructors, DontScribble, HasNewlyAllocated, MarksStale>(freeList, emptyMode, sweepMode, BlockHasNoDestructors, scribbleMode, newlyAllocatedMode, marksMode, [] (VM&, JSCell*) { });
}
+bool MarkedBlock::Handle::isFreeListedCell(const void* target) const
+{
+ ASSERT(isFreeListed());
+ return m_allocator->isFreeListedCell(target);
+}
+
} // namespace JSC
namespace WTF {
Modified: trunk/Source/_javascript_Core/heap/MarkedBlock.h (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/MarkedBlock.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/MarkedBlock.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -23,7 +23,6 @@
#include "AllocatorAttributes.h"
#include "DestructionMode.h"
-#include "FreeList.h"
#include "HeapCell.h"
#include "IterationStatus.h"
#include "WeakSet.h"
@@ -36,6 +35,7 @@
namespace JSC {
+class FreeList;
class Heap;
class JSCell;
class MarkedAllocator;
@@ -117,21 +117,22 @@
VM* vm() const;
WeakSet& weakSet();
+ enum SweepMode { SweepOnly, SweepToFreeList };
+
// Sweeping ensures that destructors get called and removes the block from the unswept
// set. Sweeping to free list also removes the block from the empty set, if it was in that
// set. Sweeping with SweepOnly may add this block to the empty set, if the block is found
- // to be empty.
+ // to be empty. The free-list being null implies SweepOnly.
//
// Note that you need to make sure that the empty bit reflects reality. If it's not set
// and the block is freshly created, then we'll make the mistake of running destructors in
// the block. If it's not set and the block has nothing marked, then we'll make the
// mistake of making a pop freelist rather than a bump freelist.
- enum SweepMode { SweepOnly, SweepToFreeList };
- FreeList sweep(SweepMode = SweepOnly);
+ void sweep(FreeList*);
// This is to be called by Subspace.
template<typename DestroyFunc>
- FreeList finishSweepKnowingSubspace(SweepMode, const DestroyFunc&);
+ void finishSweepKnowingSubspace(FreeList*, const DestroyFunc&);
void unsweepWithNoNewlyAllocated();
@@ -147,7 +148,7 @@
// of these functions:
void didConsumeFreeList(); // Call this once you've allocated all the items in the free list.
void stopAllocating(const FreeList&);
- FreeList resumeAllocating(); // Call this if you canonicalized a block for some non-collection related purpose.
+ void resumeAllocating(FreeList&); // Call this if you canonicalized a block for some non-collection related purpose.
size_t cellSize();
inline unsigned cellsPerBlock();
@@ -215,11 +216,8 @@
MarksMode marksMode();
template<bool, EmptyMode, SweepMode, SweepDestructionMode, ScribbleMode, NewlyAllocatedMode, MarksMode, typename DestroyFunc>
- FreeList specializedSweep(EmptyMode, SweepMode, SweepDestructionMode, ScribbleMode, NewlyAllocatedMode, MarksMode, const DestroyFunc&);
+ void specializedSweep(FreeList*, EmptyMode, SweepMode, SweepDestructionMode, ScribbleMode, NewlyAllocatedMode, MarksMode, const DestroyFunc&);
- template<typename Func>
- void forEachFreeCell(const FreeList&, const Func&);
-
void setIsFreeListed();
MarkedBlock::Handle* m_prev;
Modified: trunk/Source/_javascript_Core/heap/MarkedBlockInlines.h (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/MarkedBlockInlines.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/MarkedBlockInlines.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -117,12 +117,6 @@
return isLive(markingVersion, isMarking, static_cast<const HeapCell*>(p));
}
-inline bool MarkedBlock::Handle::isFreeListedCell(const void* target) const
-{
- ASSERT(isFreeListed());
- return m_allocator->isFreeListedCell(target);
-}
-
// The following has to be true for specialization to kick in:
//
// sweepMode == SweepToFreeList
@@ -145,7 +139,7 @@
// Only the DoesNotNeedDestruction one should be specialized by MarkedBlock.
template<bool specialize, MarkedBlock::Handle::EmptyMode specializedEmptyMode, MarkedBlock::Handle::SweepMode specializedSweepMode, MarkedBlock::Handle::SweepDestructionMode specializedDestructionMode, MarkedBlock::Handle::ScribbleMode specializedScribbleMode, MarkedBlock::Handle::NewlyAllocatedMode specializedNewlyAllocatedMode, MarkedBlock::Handle::MarksMode specializedMarksMode, typename DestroyFunc>
-FreeList MarkedBlock::Handle::specializedSweep(MarkedBlock::Handle::EmptyMode emptyMode, MarkedBlock::Handle::SweepMode sweepMode, MarkedBlock::Handle::SweepDestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode, MarkedBlock::Handle::MarksMode marksMode, const DestroyFunc& destroyFunc)
+void MarkedBlock::Handle::specializedSweep(FreeList* freeList, MarkedBlock::Handle::EmptyMode emptyMode, MarkedBlock::Handle::SweepMode sweepMode, MarkedBlock::Handle::SweepDestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode, MarkedBlock::Handle::MarksMode marksMode, const DestroyFunc& destroyFunc)
{
if (specialize) {
emptyMode = specializedEmptyMode;
@@ -165,6 +159,8 @@
if (false)
dataLog(RawPointer(this), "/", RawPointer(&block), ": MarkedBlock::Handle::specializedSweep!\n");
+ unsigned cellSize = this->cellSize();
+
if (Options::useBumpAllocator()
&& emptyMode == IsEmpty
&& newlyAllocatedMode == DoesNotHaveNewlyAllocated) {
@@ -182,7 +178,7 @@
}
char* startOfLastCell = static_cast<char*>(cellAlign(block.atoms() + m_endAtom - 1));
- char* payloadEnd = startOfLastCell + cellSize();
+ char* payloadEnd = startOfLastCell + cellSize;
RELEASE_ASSERT(payloadEnd - MarkedBlock::blockSize <= bitwise_cast<char*>(&block));
char* payloadBegin = bitwise_cast<char*>(block.atoms() + firstAtom());
if (scribbleMode == Scribble)
@@ -193,10 +189,11 @@
m_allocator->setIsEmpty(NoLockingNecessary, this, true);
if (space()->isMarking())
block.m_lock.unlock();
- FreeList result = FreeList::bump(payloadEnd, payloadEnd - payloadBegin);
+ if (sweepMode == SweepToFreeList)
+ freeList->initializeBump(payloadEnd, payloadEnd - payloadBegin);
if (false)
- dataLog("Quickly swept block ", RawPointer(this), " with cell size ", cellSize(), " and attributes ", m_attributes, ": ", result, "\n");
- return result;
+ dataLog("Quickly swept block ", RawPointer(this), " with cell size ", cellSize, " and attributes ", m_attributes, ": ", pointerDump(freeList), "\n");
+ return;
}
// This produces a free list that is ordered in reverse through the block.
@@ -204,6 +201,8 @@
// order of the free list.
FreeCell* head = 0;
size_t count = 0;
+ uintptr_t secret;
+ cryptographicallyRandomValues(&secret, sizeof(uintptr_t));
bool isEmpty = true;
Vector<size_t> deadCells;
VM& vm = *this->vm();
@@ -221,8 +220,8 @@
if (sweepMode == SweepToFreeList) {
FreeCell* freeCell = reinterpret_cast_ptr<FreeCell*>(cell);
if (scribbleMode == Scribble)
- scribble(freeCell, cellSize());
- freeCell->next = head;
+ scribble(freeCell, cellSize);
+ freeCell->setNext(head, secret);
head = freeCell;
++count;
}
@@ -254,19 +253,19 @@
handleDeadCell(i);
}
- FreeList result = FreeList::list(head, count * cellSize());
- if (sweepMode == SweepToFreeList)
+ if (sweepMode == SweepToFreeList) {
+ freeList->initializeList(head, secret, count * cellSize);
setIsFreeListed();
- else if (isEmpty)
+ } else if (isEmpty)
m_allocator->setIsEmpty(NoLockingNecessary, this, true);
if (false)
- dataLog("Slowly swept block ", RawPointer(&block), " with cell size ", cellSize(), " and attributes ", m_attributes, ": ", result, "\n");
- return result;
+ dataLog("Slowly swept block ", RawPointer(&block), " with cell size ", cellSize, " and attributes ", m_attributes, ": ", pointerDump(freeList), "\n");
}
template<typename DestroyFunc>
-FreeList MarkedBlock::Handle::finishSweepKnowingSubspace(SweepMode sweepMode, const DestroyFunc& destroyFunc)
+void MarkedBlock::Handle::finishSweepKnowingSubspace(FreeList* freeList, const DestroyFunc& destroyFunc)
{
+ SweepMode sweepMode = freeList ? SweepToFreeList : SweepOnly;
SweepDestructionMode destructionMode = this->sweepDestructionMode();
EmptyMode emptyMode = this->emptyMode();
ScribbleMode scribbleMode = this->scribbleMode();
@@ -273,7 +272,6 @@
NewlyAllocatedMode newlyAllocatedMode = this->newlyAllocatedMode();
MarksMode marksMode = this->marksMode();
- FreeList result;
auto trySpecialized = [&] () -> bool {
if (sweepMode != SweepToFreeList)
return false;
@@ -288,10 +286,10 @@
switch (marksMode) {
case MarksNotStale:
- result = specializedSweep<true, NotEmpty, SweepToFreeList, BlockHasDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale>(IsEmpty, SweepToFreeList, BlockHasDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale, destroyFunc);
+ specializedSweep<true, NotEmpty, SweepToFreeList, BlockHasDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale>(freeList, IsEmpty, SweepToFreeList, BlockHasDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksNotStale, destroyFunc);
return true;
case MarksStale:
- result = specializedSweep<true, NotEmpty, SweepToFreeList, BlockHasDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale>(IsEmpty, SweepToFreeList, BlockHasDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale, destroyFunc);
+ specializedSweep<true, NotEmpty, SweepToFreeList, BlockHasDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale>(freeList, IsEmpty, SweepToFreeList, BlockHasDestructors, DontScribble, DoesNotHaveNewlyAllocated, MarksStale, destroyFunc);
return true;
}
@@ -299,10 +297,10 @@
};
if (trySpecialized())
- return result;
+ return;
// The template arguments don't matter because the first one is false.
- return specializedSweep<false, IsEmpty, SweepOnly, BlockHasNoDestructors, DontScribble, HasNewlyAllocated, MarksStale>(emptyMode, sweepMode, destructionMode, scribbleMode, newlyAllocatedMode, marksMode, destroyFunc);
+ specializedSweep<false, IsEmpty, SweepOnly, BlockHasNoDestructors, DontScribble, HasNewlyAllocated, MarksStale>(freeList, emptyMode, sweepMode, destructionMode, scribbleMode, newlyAllocatedMode, marksMode, destroyFunc);
}
inline MarkedBlock::Handle::SweepDestructionMode MarkedBlock::Handle::sweepDestructionMode()
Modified: trunk/Source/_javascript_Core/heap/Subspace.cpp (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/Subspace.cpp 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/Subspace.cpp 2017-06-02 15:59:22 UTC (rev 217711)
@@ -77,9 +77,9 @@
{
}
-FreeList Subspace::finishSweep(MarkedBlock::Handle& block, MarkedBlock::Handle::SweepMode sweepMode)
+void Subspace::finishSweep(MarkedBlock::Handle& block, FreeList* freeList)
{
- return block.finishSweepKnowingSubspace(sweepMode, DestroyFunc());
+ block.finishSweepKnowingSubspace(freeList, DestroyFunc());
}
void Subspace::destroy(VM& vm, JSCell* cell)
Modified: trunk/Source/_javascript_Core/heap/Subspace.h (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/Subspace.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/Subspace.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -54,7 +54,7 @@
// The purpose of overriding this is to specialize the sweep for your destructors. This won't
// be called for no-destructor blocks. This must call MarkedBlock::finishSweepKnowingSubspace.
- virtual FreeList finishSweep(MarkedBlock::Handle&, MarkedBlock::Handle::SweepMode);
+ virtual void finishSweep(MarkedBlock::Handle&, FreeList*);
// These get called for large objects.
virtual void destroy(VM&, JSCell*);
Modified: trunk/Source/_javascript_Core/heap/SubspaceInlines.h (217710 => 217711)
--- trunk/Source/_javascript_Core/heap/SubspaceInlines.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/heap/SubspaceInlines.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -26,7 +26,7 @@
#pragma once
#include "JSCell.h"
-#include "MarkedAllocator.h"
+#include "MarkedAllocatorInlines.h"
#include "MarkedBlock.h"
#include "MarkedSpace.h"
#include "Subspace.h"
Modified: trunk/Source/_javascript_Core/jit/AssemblyHelpers.h (217710 => 217711)
--- trunk/Source/_javascript_Core/jit/AssemblyHelpers.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/jit/AssemblyHelpers.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -1458,7 +1458,7 @@
Jump popPath;
Jump done;
- load32(Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + OBJECT_OFFSETOF(FreeList, remaining)), resultGPR);
+ load32(Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + FreeList::offsetOfRemaining()), resultGPR);
popPath = branchTest32(Zero, resultGPR);
if (allocator)
add32(TrustedImm32(-allocator->cellSize()), resultGPR, scratchGPR);
@@ -1472,8 +1472,8 @@
}
}
negPtr(resultGPR);
- store32(scratchGPR, Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + OBJECT_OFFSETOF(FreeList, remaining)));
- Address payloadEndAddr = Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + OBJECT_OFFSETOF(FreeList, payloadEnd));
+ store32(scratchGPR, Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + FreeList::offsetOfRemaining()));
+ Address payloadEndAddr = Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + FreeList::offsetOfPayloadEnd());
if (isX86())
addPtr(payloadEndAddr, resultGPR);
else {
@@ -1485,13 +1485,19 @@
popPath.link(this);
- loadPtr(Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + OBJECT_OFFSETOF(FreeList, head)), resultGPR);
+ loadPtr(Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + FreeList::offsetOfScrambledHead()), resultGPR);
+ if (isX86())
+ xorPtr(Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + FreeList::offsetOfSecret()), resultGPR);
+ else {
+ loadPtr(Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + FreeList::offsetOfSecret()), scratchGPR);
+ xorPtr(scratchGPR, resultGPR);
+ }
slowPath.append(branchTestPtr(Zero, resultGPR));
// The object is half-allocated: we have what we know is a fresh object, but
// it's still on the GC's free list.
loadPtr(Address(resultGPR), scratchGPR);
- storePtr(scratchGPR, Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + OBJECT_OFFSETOF(FreeList, head)));
+ storePtr(scratchGPR, Address(allocatorGPR, MarkedAllocator::offsetOfFreeList() + FreeList::offsetOfScrambledHead()));
done.link(this);
}
Modified: trunk/Source/_javascript_Core/runtime/JSDestructibleObjectSubspace.cpp (217710 => 217711)
--- trunk/Source/_javascript_Core/runtime/JSDestructibleObjectSubspace.cpp 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/runtime/JSDestructibleObjectSubspace.cpp 2017-06-02 15:59:22 UTC (rev 217711)
@@ -52,9 +52,9 @@
{
}
-FreeList JSDestructibleObjectSubspace::finishSweep(MarkedBlock::Handle& handle, MarkedBlock::Handle::SweepMode sweepMode)
+void JSDestructibleObjectSubspace::finishSweep(MarkedBlock::Handle& handle, FreeList* freeList)
{
- return handle.finishSweepKnowingSubspace(sweepMode, DestroyFunc());
+ handle.finishSweepKnowingSubspace(freeList, DestroyFunc());
}
void JSDestructibleObjectSubspace::destroy(VM& vm, JSCell* cell)
Modified: trunk/Source/_javascript_Core/runtime/JSDestructibleObjectSubspace.h (217710 => 217711)
--- trunk/Source/_javascript_Core/runtime/JSDestructibleObjectSubspace.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/runtime/JSDestructibleObjectSubspace.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -34,7 +34,7 @@
JS_EXPORT_PRIVATE JSDestructibleObjectSubspace(CString name, Heap&);
JS_EXPORT_PRIVATE virtual ~JSDestructibleObjectSubspace();
- FreeList finishSweep(MarkedBlock::Handle&, MarkedBlock::Handle::SweepMode) override;
+ void finishSweep(MarkedBlock::Handle&, FreeList*) override;
void destroy(VM&, JSCell*) override;
};
Modified: trunk/Source/_javascript_Core/runtime/JSSegmentedVariableObjectSubspace.cpp (217710 => 217711)
--- trunk/Source/_javascript_Core/runtime/JSSegmentedVariableObjectSubspace.cpp 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/runtime/JSSegmentedVariableObjectSubspace.cpp 2017-06-02 15:59:22 UTC (rev 217711)
@@ -52,9 +52,9 @@
{
}
-FreeList JSSegmentedVariableObjectSubspace::finishSweep(MarkedBlock::Handle& handle, MarkedBlock::Handle::SweepMode sweepMode)
+void JSSegmentedVariableObjectSubspace::finishSweep(MarkedBlock::Handle& handle, FreeList* freeList)
{
- return handle.finishSweepKnowingSubspace(sweepMode, DestroyFunc());
+ handle.finishSweepKnowingSubspace(freeList, DestroyFunc());
}
void JSSegmentedVariableObjectSubspace::destroy(VM& vm, JSCell* cell)
Modified: trunk/Source/_javascript_Core/runtime/JSSegmentedVariableObjectSubspace.h (217710 => 217711)
--- trunk/Source/_javascript_Core/runtime/JSSegmentedVariableObjectSubspace.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/runtime/JSSegmentedVariableObjectSubspace.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -34,7 +34,7 @@
JS_EXPORT_PRIVATE JSSegmentedVariableObjectSubspace(CString name, Heap&);
JS_EXPORT_PRIVATE virtual ~JSSegmentedVariableObjectSubspace();
- FreeList finishSweep(MarkedBlock::Handle&, MarkedBlock::Handle::SweepMode) override;
+ void finishSweep(MarkedBlock::Handle&, FreeList*) override;
void destroy(VM&, JSCell*) override;
};
Modified: trunk/Source/_javascript_Core/runtime/JSStringSubspace.cpp (217710 => 217711)
--- trunk/Source/_javascript_Core/runtime/JSStringSubspace.cpp 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/runtime/JSStringSubspace.cpp 2017-06-02 15:59:22 UTC (rev 217711)
@@ -52,9 +52,9 @@
{
}
-FreeList JSStringSubspace::finishSweep(MarkedBlock::Handle& handle, MarkedBlock::Handle::SweepMode sweepMode)
+void JSStringSubspace::finishSweep(MarkedBlock::Handle& handle, FreeList* freeList)
{
- return handle.finishSweepKnowingSubspace(sweepMode, DestroyFunc());
+ handle.finishSweepKnowingSubspace(freeList, DestroyFunc());
}
void JSStringSubspace::destroy(VM& vm, JSCell* cell)
Modified: trunk/Source/_javascript_Core/runtime/JSStringSubspace.h (217710 => 217711)
--- trunk/Source/_javascript_Core/runtime/JSStringSubspace.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/runtime/JSStringSubspace.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -34,7 +34,7 @@
JS_EXPORT_PRIVATE JSStringSubspace(CString name, Heap&);
JS_EXPORT_PRIVATE virtual ~JSStringSubspace();
- FreeList finishSweep(MarkedBlock::Handle&, MarkedBlock::Handle::SweepMode) override;
+ void finishSweep(MarkedBlock::Handle&, FreeList*) override;
void destroy(VM&, JSCell*) override;
};
Modified: trunk/Source/_javascript_Core/runtime/Options.h (217710 => 217711)
--- trunk/Source/_javascript_Core/runtime/Options.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/runtime/Options.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -362,7 +362,7 @@
v(gcLogLevel, logGC, GCLogging::None, Normal, "debugging option to log GC activity (0 = None, 1 = Basic, 2 = Verbose)") \
v(bool, useGC, true, Normal, nullptr) \
v(bool, gcAtEnd, false, Normal, "If true, the jsc CLI will do a GC before exiting") \
- v(bool, forceGCSlowPaths, false, Normal, "If true, we will force all JIT fast allocations down their slow paths.")\
+ v(bool, forceGCSlowPaths, false, Normal, "If true, we will force all JIT fast allocations down their slow paths.") \
v(unsigned, gcMaxHeapSize, 0, Normal, nullptr) \
v(unsigned, forceRAMSize, 0, Normal, nullptr) \
v(bool, recordGCPauseTimes, false, Normal, nullptr) \
Modified: trunk/Source/_javascript_Core/wasm/js/JSWebAssemblyCodeBlockSubspace.cpp (217710 => 217711)
--- trunk/Source/_javascript_Core/wasm/js/JSWebAssemblyCodeBlockSubspace.cpp 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/wasm/js/JSWebAssemblyCodeBlockSubspace.cpp 2017-06-02 15:59:22 UTC (rev 217711)
@@ -56,9 +56,9 @@
{
}
-FreeList JSWebAssemblyCodeBlockSubspace::finishSweep(MarkedBlock::Handle& handle, MarkedBlock::Handle::SweepMode sweepMode)
+void JSWebAssemblyCodeBlockSubspace::finishSweep(MarkedBlock::Handle& handle, FreeList* freeList)
{
- return handle.finishSweepKnowingSubspace(sweepMode, DestroyFunc());
+ handle.finishSweepKnowingSubspace(freeList, DestroyFunc());
}
void JSWebAssemblyCodeBlockSubspace::destroy(VM& vm, JSCell* cell)
Modified: trunk/Source/_javascript_Core/wasm/js/JSWebAssemblyCodeBlockSubspace.h (217710 => 217711)
--- trunk/Source/_javascript_Core/wasm/js/JSWebAssemblyCodeBlockSubspace.h 2017-06-02 15:18:25 UTC (rev 217710)
+++ trunk/Source/_javascript_Core/wasm/js/JSWebAssemblyCodeBlockSubspace.h 2017-06-02 15:59:22 UTC (rev 217711)
@@ -36,7 +36,7 @@
JSWebAssemblyCodeBlockSubspace(CString name, Heap&);
virtual ~JSWebAssemblyCodeBlockSubspace();
- FreeList finishSweep(MarkedBlock::Handle&, MarkedBlock::Handle::SweepMode) override;
+ void finishSweep(MarkedBlock::Handle&, FreeList*) override;
void destroy(VM&, JSCell*) override;
};