Diff
Modified: trunk/Source/_javascript_Core/ChangeLog (174794 => 174795)
--- trunk/Source/_javascript_Core/ChangeLog 2014-10-16 21:58:06 UTC (rev 174794)
+++ trunk/Source/_javascript_Core/ChangeLog 2014-10-16 22:02:16 UTC (rev 174795)
@@ -1,3 +1,45 @@
+2014-10-15 Oliver Hunt <[email protected]>
+
+ Use a single allocation for the Arguments object
+ https://bugs.webkit.org/show_bug.cgi?id=137751
+
+ Reviewed by Filip Pizlo.
+
+ This patch removes the secondary allocation for parameters in the Arguments
+ object. This is fairly simple, but we needed to make it possible for the JIT
+ to allocate a variable-sized GC object. To do this I've added a new
+ emitAllocateVariableSizedJSObject function to the JIT that does the work to
+ find the correct heap for a variable-sized allocation and then bump that
+ allocator.
+
+ * dfg/DFGSpeculativeJIT.cpp:
+ (JSC::DFG::SpeculativeJIT::emitAllocateArguments):
+ * dfg/DFGSpeculativeJIT.h:
+ (JSC::DFG::SpeculativeJIT::emitAllocateVariableSizedJSObject):
+ * heap/CopyToken.h:
+ * heap/Heap.h:
+ (JSC::Heap::subspaceForObjectWithoutDestructor):
+ (JSC::Heap::subspaceForObjectNormalDestructor):
+ (JSC::Heap::subspaceForObjectsWithImmortalStructure):
+ * heap/MarkedSpace.h:
+ (JSC::MarkedSpace::subspaceForObjectsWithNormalDestructor):
+ (JSC::MarkedSpace::subspaceForObjectsWithImmortalStructure):
+ (JSC::MarkedSpace::subspaceForObjectsWithoutDestructor):
+ * interpreter/StackVisitor.cpp:
+ (JSC::StackVisitor::Frame::createArguments):
+ * runtime/Arguments.cpp:
+ (JSC::Arguments::visitChildren):
+ (JSC::Arguments::copyBackingStore):
+ (JSC::Arguments::tearOff):
+ (JSC::Arguments::allocateRegisterArray): Deleted.
+ * runtime/Arguments.h:
+ (JSC::Arguments::create):
+ (JSC::Arguments::isTornOff):
+ (JSC::Arguments::offsetOfRegisterArray):
+ (JSC::Arguments::registerArraySizeInBytes):
+ (JSC::Arguments::registerArray):
+ (JSC::Arguments::allocationSize): Deleted.
+
2014-10-15 Filip Pizlo <[email protected]>
Apparently we've had a hole in arguments capture all along
Modified: trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.cpp (174794 => 174795)
--- trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.cpp 2014-10-16 21:58:06 UTC (rev 174794)
+++ trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.cpp 2014-10-16 22:02:16 UTC (rev 174795)
@@ -111,8 +111,12 @@
void SpeculativeJIT::emitAllocateArguments(GPRReg resultGPR, GPRReg scratchGPR1, GPRReg scratchGPR2, MacroAssembler::JumpList& slowPath)
{
Structure* structure = m_jit.graph().globalObjectFor(m_currentNode->origin.semantic)->argumentsStructure();
- emitAllocateDestructibleObject<Arguments>(resultGPR, structure, scratchGPR1, scratchGPR2, slowPath);
+ m_jit.load32(JITCompiler::payloadFor(JSStack::ArgumentCount), scratchGPR1);
+ m_jit.mul32(TrustedImm32(sizeof(JSValue)), scratchGPR1, scratchGPR1);
+ m_jit.add32(TrustedImm32(Arguments::offsetOfInlineRegisterArray()), scratchGPR1);
+ emitAllocateVariableSizedJSObject<Arguments>(resultGPR, structure, scratchGPR1, scratchGPR1, scratchGPR2, slowPath);
+
m_jit.storePtr(TrustedImmPtr(0), MacroAssembler::Address(resultGPR, Arguments::offsetOfActivation()));
m_jit.load32(JITCompiler::payloadFor(JSStack::ArgumentCount), scratchGPR1);
@@ -124,11 +128,11 @@
m_jit.store8(TrustedImm32(1), MacroAssembler::Address(resultGPR, Arguments::offsetOfIsStrictMode()));
m_jit.storePtr(GPRInfo::callFrameRegister, MacroAssembler::Address(resultGPR, Arguments::offsetOfRegisters()));
- m_jit.storePtr(TrustedImmPtr(0), MacroAssembler::Address(resultGPR, Arguments::offsetOfRegisterArray()));
m_jit.storePtr(TrustedImmPtr(0), MacroAssembler::Address(resultGPR, Arguments::offsetOfSlowArgumentData()));
m_jit.loadPtr(JITCompiler::addressFor(JSStack::Callee), scratchGPR1);
m_jit.storePtr(scratchGPR1, MacroAssembler::Address(resultGPR, Arguments::offsetOfCallee()));
+
}
void SpeculativeJIT::speculationCheck(ExitKind kind, JSValueSource jsValueSource, Node* node, MacroAssembler::Jump jumpToFail)
Modified: trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.h (174794 => 174795)
--- trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.h 2014-10-16 21:58:06 UTC (rev 174794)
+++ trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.h 2014-10-16 22:02:16 UTC (rev 174795)
@@ -2252,6 +2252,38 @@
emitAllocateJSObject(resultGPR, scratchGPR1, structure, storage, scratchGPR2, slowPath);
}
+ template <typename ClassType, typename StructureType> // StructureType and StorageType can be GPR or ImmPtr.
+ void emitAllocateVariableSizedJSObject(GPRReg resultGPR, StructureType structure, GPRReg allocationSize, GPRReg scratchGPR1, GPRReg scratchGPR2, MacroAssembler::JumpList& slowPath)
+ {
+ static_assert(!(MarkedSpace::preciseStep & (MarkedSpace::preciseStep - 1)), "MarkedSpace::preciseStep must be a power of two.");
+ static_assert(!(MarkedSpace::impreciseStep & (MarkedSpace::impreciseStep - 1)), "MarkedSpace::impreciseStep must be a power of two.");
+
+ MarkedSpace::Subspace* subspace;
+ if (ClassType::needsDestruction && ClassType::hasImmortalStructure)
+ subspace = &m_jit.vm()->heap.subspaceForObjectsWithImmortalStructure();
+ else if (ClassType::needsDestruction)
+ subspace = &m_jit.vm()->heap.subspaceForObjectNormalDestructor();
+ else
+ subspace = &m_jit.vm()->heap.subspaceForObjectWithoutDestructor();
+ m_jit.add32(TrustedImm32(MarkedSpace::preciseStep - 1), allocationSize);
+ MacroAssembler::Jump notSmall = m_jit.branch32(MacroAssembler::AboveOrEqual, allocationSize, TrustedImm32(MarkedSpace::preciseCutoff));
+ m_jit.rshift32(allocationSize, TrustedImm32(getLSBSet(MarkedSpace::preciseStep)), scratchGPR1);
+ m_jit.mul32(TrustedImm32(sizeof(MarkedAllocator)), scratchGPR1, scratchGPR1);
+ m_jit.addPtr(MacroAssembler::TrustedImmPtr(&subspace->preciseAllocators[0]), scratchGPR1);
+
+ MacroAssembler::Jump selectedSmallSpace = m_jit.jump();
+ notSmall.link(&m_jit);
+ slowPath.append(m_jit.branch32(MacroAssembler::AboveOrEqual, allocationSize, TrustedImm32(MarkedSpace::impreciseCutoff)));
+ m_jit.rshift32(allocationSize, TrustedImm32(getLSBSet(MarkedSpace::impreciseStep)), scratchGPR1);
+ m_jit.mul32(TrustedImm32(sizeof(MarkedAllocator)), scratchGPR1, scratchGPR1);
+ m_jit.addPtr(MacroAssembler::TrustedImmPtr(&subspace->impreciseAllocators[0]), scratchGPR1);
+
+ selectedSmallSpace.link(&m_jit);
+
+ emitAllocateJSObject(resultGPR, scratchGPR1, TrustedImmPtr(structure), TrustedImmPtr(0), scratchGPR2, slowPath);
+ m_jit.storePtr(TrustedImmPtr(structure->classInfo()), MacroAssembler::Address(resultGPR, JSDestructibleObject::classInfoOffset()));
+ }
+
template <typename T>
void emitAllocateDestructibleObject(GPRReg resultGPR, Structure* structure,
GPRReg scratchGPR1, GPRReg scratchGPR2, MacroAssembler::JumpList& slowPath)
Modified: trunk/Source/_javascript_Core/heap/CopyToken.h (174794 => 174795)
--- trunk/Source/_javascript_Core/heap/CopyToken.h 2014-10-16 21:58:06 UTC (rev 174794)
+++ trunk/Source/_javascript_Core/heap/CopyToken.h 2014-10-16 22:02:16 UTC (rev 174795)
@@ -32,7 +32,6 @@
ButterflyCopyToken,
TypedArrayVectorCopyToken,
MapBackingStoreCopyToken,
- ArgumentsRegisterArrayCopyToken,
ArgumentsSlowArgumentDataCopyToken
};
Modified: trunk/Source/_javascript_Core/heap/Heap.h (174794 => 174795)
--- trunk/Source/_javascript_Core/heap/Heap.h 2014-10-16 21:58:06 UTC (rev 174794)
+++ trunk/Source/_javascript_Core/heap/Heap.h 2014-10-16 22:02:16 UTC (rev 174795)
@@ -135,7 +135,9 @@
HeapOperation operationInProgress() { return m_operationInProgress; }
// true if an allocation or collection is in progress
bool isBusy();
-
+ MarkedSpace::Subspace& subspaceForObjectWithoutDestructor() { return m_objectSpace.subspaceForObjectsWithoutDestructor(); }
+ MarkedSpace::Subspace& subspaceForObjectNormalDestructor() { return m_objectSpace.subspaceForObjectsWithNormalDestructor(); }
+ MarkedSpace::Subspace& subspaceForObjectsWithImmortalStructure() { return m_objectSpace.subspaceForObjectsWithImmortalStructure(); }
MarkedAllocator& allocatorForObjectWithoutDestructor(size_t bytes) { return m_objectSpace.allocatorFor(bytes); }
MarkedAllocator& allocatorForObjectWithNormalDestructor(size_t bytes) { return m_objectSpace.normalDestructorAllocatorFor(bytes); }
MarkedAllocator& allocatorForObjectWithImmortalStructureDestructor(size_t bytes) { return m_objectSpace.immortalStructureDestructorAllocatorFor(bytes); }
Modified: trunk/Source/_javascript_Core/heap/MarkedSpace.h (174794 => 174795)
--- trunk/Source/_javascript_Core/heap/MarkedSpace.h 2014-10-16 21:58:06 UTC (rev 174794)
+++ trunk/Source/_javascript_Core/heap/MarkedSpace.h 2014-10-16 22:02:16 UTC (rev 174795)
@@ -82,6 +82,22 @@
class MarkedSpace {
WTF_MAKE_NONCOPYABLE(MarkedSpace);
public:
+ // [ 32... 128 ]
+ static const size_t preciseStep = MarkedBlock::atomSize;
+ static const size_t preciseCutoff = 128;
+ static const size_t preciseCount = preciseCutoff / preciseStep;
+
+ // [ 1024... blockSize ]
+ static const size_t impreciseStep = 2 * preciseCutoff;
+ static const size_t impreciseCutoff = MarkedBlock::blockSize / 2;
+ static const size_t impreciseCount = impreciseCutoff / impreciseStep;
+
+ struct Subspace {
+ std::array<MarkedAllocator, preciseCount> preciseAllocators;
+ std::array<MarkedAllocator, impreciseCount> impreciseAllocators;
+ MarkedAllocator largeAllocator;
+ };
+
MarkedSpace(Heap*);
~MarkedSpace();
void lastChanceToFinalize();
@@ -93,7 +109,11 @@
void* allocateWithNormalDestructor(size_t);
void* allocateWithImmortalStructureDestructor(size_t);
void* allocateWithoutDestructor(size_t);
-
+
+ Subspace& subspaceForObjectsWithNormalDestructor() { return m_normalDestructorSpace; }
+ Subspace& subspaceForObjectsWithImmortalStructure() { return m_immortalStructureDestructorSpace; }
+ Subspace& subspaceForObjectsWithoutDestructor() { return m_normalSpace; }
+
void resetAllocators();
void visitWeakSets(HeapRootVisitor&);
@@ -143,26 +163,11 @@
private:
friend class DelayedReleaseScope;
friend class LLIntOffsetsExtractor;
+ friend class JIT;
template<typename Functor> void forEachAllocator(Functor&);
template<typename Functor> void forEachAllocator();
- // [ 32... 128 ]
- static const size_t preciseStep = MarkedBlock::atomSize;
- static const size_t preciseCutoff = 128;
- static const size_t preciseCount = preciseCutoff / preciseStep;
-
- // [ 1024... blockSize ]
- static const size_t impreciseStep = 2 * preciseCutoff;
- static const size_t impreciseCutoff = MarkedBlock::blockSize / 2;
- static const size_t impreciseCount = impreciseCutoff / impreciseStep;
-
- struct Subspace {
- std::array<MarkedAllocator, preciseCount> preciseAllocators;
- std::array<MarkedAllocator, impreciseCount> impreciseAllocators;
- MarkedAllocator largeAllocator;
- };
-
Subspace m_normalDestructorSpace;
Subspace m_immortalStructureDestructorSpace;
Subspace m_normalSpace;
Modified: trunk/Source/_javascript_Core/interpreter/StackVisitor.cpp (174794 => 174795)
--- trunk/Source/_javascript_Core/interpreter/StackVisitor.cpp 2014-10-16 21:58:06 UTC (rev 174794)
+++ trunk/Source/_javascript_Core/interpreter/StackVisitor.cpp 2014-10-16 22:02:16 UTC (rev 174795)
@@ -271,6 +271,7 @@
ASSERT(m_inlineCallFrame);
arguments = Arguments::create(vm, physicalFrame, m_inlineCallFrame, mode);
arguments->tearOff(physicalFrame, m_inlineCallFrame);
+ jsCast<Arguments*>((JSCell*)arguments);
} else
#endif
{
Modified: trunk/Source/_javascript_Core/runtime/Arguments.cpp (174794 => 174795)
--- trunk/Source/_javascript_Core/runtime/Arguments.cpp 2014-10-16 21:58:06 UTC (rev 174794)
+++ trunk/Source/_javascript_Core/runtime/Arguments.cpp 2014-10-16 22:02:16 UTC (rev 174795)
@@ -43,14 +43,13 @@
void Arguments::visitChildren(JSCell* cell, SlotVisitor& visitor)
{
Arguments* thisObject = jsCast<Arguments*>(cell);
+
ASSERT_GC_OBJECT_INHERITS(thisObject, info());
JSObject::visitChildren(thisObject, visitor);
- if (thisObject->m_registerArray) {
- visitor.copyLater(thisObject, ArgumentsRegisterArrayCopyToken,
- thisObject->m_registerArray.get(), thisObject->registerArraySizeInBytes());
- visitor.appendValues(thisObject->m_registerArray.get(), thisObject->m_numArguments);
- }
+ if (thisObject->isTornOff())
+ visitor.appendValues(&thisObject->registerArray(), thisObject->m_numArguments);
+
if (thisObject->m_slowArgumentData) {
visitor.copyLater(thisObject, ArgumentsSlowArgumentDataCopyToken,
thisObject->m_slowArgumentData.get(), SlowArgumentData::sizeForNumArguments(thisObject->m_numArguments));
@@ -66,22 +65,6 @@
switch (token) {
- case ArgumentsRegisterArrayCopyToken: {
- WriteBarrier<Unknown>* registerArray = thisObject->m_registerArray.get();
- if (!registerArray)
- return;
-
- if (visitor.checkIfShouldCopy(registerArray)) {
- size_t bytes = thisObject->registerArraySizeInBytes();
- WriteBarrier<Unknown>* newRegisterArray = static_cast<WriteBarrier<Unknown>*>(visitor.allocateNewSpace(bytes));
- memcpy(newRegisterArray, registerArray, bytes);
- thisObject->m_registerArray.setWithoutWriteBarrier(newRegisterArray);
- thisObject->m_registers = newRegisterArray - CallFrame::offsetFor(1) - 1;
- visitor.didCopy(registerArray, bytes);
- }
- return;
- }
-
case ArgumentsSlowArgumentDataCopyToken: {
SlowArgumentData* slowArgumentData = thisObject->m_slowArgumentData.get();
if (!slowArgumentData)
@@ -361,15 +344,6 @@
return Base::defineOwnProperty(object, exec, propertyName, descriptor, shouldThrow);
}
-void Arguments::allocateRegisterArray(VM& vm)
-{
- ASSERT(!m_registerArray);
- void* backingStore;
- if (!vm.heap.tryAllocateStorage(this, registerArraySizeInBytes(), &backingStore))
- RELEASE_ASSERT_NOT_REACHED();
- m_registerArray.set(vm, this, static_cast<WriteBarrier<Unknown>*>(backingStore));
-}
-
void Arguments::tearOff(CallFrame* callFrame)
{
if (isTornOff())
@@ -380,13 +354,14 @@
// Must be called for the same call frame from which it was created.
ASSERT(bitwise_cast<WriteBarrier<Unknown>*>(callFrame) == m_registers);
-
- allocateRegisterArray(callFrame->vm());
- m_registers = m_registerArray.get() - CallFrame::offsetFor(1) - 1;
+ m_registers = &registerArray() - CallFrame::offsetFor(1) - 1;
+
for (size_t i = 0; i < m_numArguments; ++i) {
- if (m_slowArgumentData && m_slowArgumentData->slowArguments()[i].status == SlowArgument::Captured)
+ if (m_slowArgumentData && m_slowArgumentData->slowArguments()[i].status == SlowArgument::Captured) {
+ m_registers[CallFrame::argumentOffset(i)].setUndefined();
continue;
+ }
trySetArgument(callFrame->vm(), i, callFrame->argumentAfterCapture(i));
}
}
@@ -399,10 +374,9 @@
if (!m_numArguments)
return;
-
- allocateRegisterArray(callFrame->vm());
- m_registers = m_registerArray.get() - CallFrame::offsetFor(1) - 1;
+ m_registers = &registerArray() - CallFrame::offsetFor(1) - 1;
+
for (size_t i = 0; i < m_numArguments; ++i) {
ValueRecovery& recovery = inlineCallFrame->arguments[i + 1];
trySetArgument(callFrame->vm(), i, recovery.recover(callFrame));
Modified: trunk/Source/_javascript_Core/runtime/Arguments.h (174794 => 174795)
--- trunk/Source/_javascript_Core/runtime/Arguments.h 2014-10-16 21:58:06 UTC (rev 174794)
+++ trunk/Source/_javascript_Core/runtime/Arguments.h 2014-10-16 22:02:16 UTC (rev 174795)
@@ -48,14 +48,14 @@
static Arguments* create(VM& vm, CallFrame* callFrame, ArgumentsMode mode = NormalArgumentsCreationMode)
{
- Arguments* arguments = new (NotNull, allocateCell<Arguments>(vm.heap)) Arguments(callFrame);
+ Arguments* arguments = new (NotNull, allocateCell<Arguments>(vm.heap, offsetOfInlineRegisterArray() + registerArraySizeInBytes(callFrame))) Arguments(callFrame);
arguments->finishCreation(callFrame, mode);
return arguments;
}
static Arguments* create(VM& vm, CallFrame* callFrame, InlineCallFrame* inlineCallFrame, ArgumentsMode mode = NormalArgumentsCreationMode)
{
- Arguments* arguments = new (NotNull, allocateCell<Arguments>(vm.heap)) Arguments(callFrame);
+ Arguments* arguments = new (NotNull, allocateCell<Arguments>(vm.heap, offsetOfInlineRegisterArray() + registerArraySizeInBytes(inlineCallFrame))) Arguments(callFrame);
arguments->finishCreation(callFrame, inlineCallFrame, mode);
return arguments;
}
@@ -86,7 +86,7 @@
void copyToArguments(ExecState*, CallFrame*, uint32_t copyLength, int32_t firstArgumentOffset);
void tearOff(CallFrame*);
void tearOff(CallFrame*, InlineCallFrame*);
- bool isTornOff() const { return m_registerArray.get(); }
+ bool isTornOff() const { return m_registers == (&registerArray() - CallFrame::offsetFor(1) - 1); }
static Structure* createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
{
@@ -98,15 +98,9 @@
static ptrdiff_t offsetOfOverrodeLength() { return OBJECT_OFFSETOF(Arguments, m_overrodeLength); }
static ptrdiff_t offsetOfIsStrictMode() { return OBJECT_OFFSETOF(Arguments, m_isStrictMode); }
static ptrdiff_t offsetOfRegisters() { return OBJECT_OFFSETOF(Arguments, m_registers); }
- static ptrdiff_t offsetOfRegisterArray() { return OBJECT_OFFSETOF(Arguments, m_registerArray); }
+ static ptrdiff_t offsetOfInlineRegisterArray() { return WTF::roundUpToMultipleOf<8>(sizeof(Arguments)); }
static ptrdiff_t offsetOfSlowArgumentData() { return OBJECT_OFFSETOF(Arguments, m_slowArgumentData); }
static ptrdiff_t offsetOfCallee() { return OBJECT_OFFSETOF(Arguments, m_callee); }
-
- static size_t allocationSize(size_t inlineCapacity)
- {
- ASSERT_UNUSED(inlineCapacity, !inlineCapacity);
- return sizeof(Arguments);
- }
protected:
static const unsigned StructureFlags = OverridesGetOwnPropertySlot | InterceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero | OverridesGetPropertyNames | JSObject::StructureFlags;
@@ -126,8 +120,8 @@
void createStrictModeCallerIfNecessary(ExecState*);
void createStrictModeCalleeIfNecessary(ExecState*);
- size_t registerArraySizeInBytes() const { return sizeof(WriteBarrier<Unknown>) * m_numArguments; }
- void allocateRegisterArray(VM&);
+ static size_t registerArraySizeInBytes(CallFrame* callFrame) { return sizeof(WriteBarrier<Unknown>) * callFrame->argumentCount(); }
+ static size_t registerArraySizeInBytes(InlineCallFrame* inlineCallFrame) { return sizeof(WriteBarrier<Unknown>) * (inlineCallFrame->arguments.size() - 1); }
bool isArgument(size_t);
bool trySetArgument(VM&, size_t argument, JSValue);
JSValue tryGetArgument(size_t argument);
@@ -151,7 +145,8 @@
bool m_isStrictMode;
WriteBarrierBase<Unknown>* m_registers;
- CopyWriteBarrier<WriteBarrier<Unknown>> m_registerArray;
+ WriteBarrier<Unknown>& registerArray() { return *reinterpret_cast<WriteBarrier<Unknown>*>(reinterpret_cast<char*>(this) + offsetOfInlineRegisterArray()); }
+ const WriteBarrier<Unknown>& registerArray() const { return *reinterpret_cast<const WriteBarrier<Unknown>*>(reinterpret_cast<const char*>(this) + offsetOfInlineRegisterArray()); }
public:
struct SlowArgumentData {