Diff
Modified: trunk/Source/JavaScriptCore/ChangeLog (230443 => 230444)
--- trunk/Source/JavaScriptCore/ChangeLog 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ChangeLog 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,3 +1,82 @@
+2018-04-08 Mark Lam <mark.lam@apple.com>
+
+ Add pointer profiling to the FTL and supporting code.
+ https://bugs.webkit.org/show_bug.cgi?id=184395
+ <rdar://problem/39264019>
+
+ Reviewed by Michael Saboff and Filip Pizlo.
+
+ * assembler/CodeLocation.h:
+ (JSC::CodeLocationLabel::retagged):
+ (JSC::CodeLocationJump::retagged):
+ * assembler/LinkBuffer.h:
+ (JSC::LinkBuffer::locationOf):
+ * dfg/DFGJITCompiler.cpp:
+ (JSC::DFG::JITCompiler::linkOSRExits):
+ (JSC::DFG::JITCompiler::link):
+ * ftl/FTLCompile.cpp:
+ (JSC::FTL::compile):
+ * ftl/FTLExceptionTarget.cpp:
+ (JSC::FTL::ExceptionTarget::label):
+ (JSC::FTL::ExceptionTarget::jumps):
+ * ftl/FTLExceptionTarget.h:
+ * ftl/FTLJITCode.cpp:
+ (JSC::FTL::JITCode::executableAddressAtOffset):
+ * ftl/FTLLazySlowPath.cpp:
+ (JSC::FTL::LazySlowPath::~LazySlowPath):
+ (JSC::FTL::LazySlowPath::initialize):
+ (JSC::FTL::LazySlowPath::generate):
+ (JSC::FTL::LazySlowPath::LazySlowPath): Deleted.
+ * ftl/FTLLazySlowPath.h:
+ * ftl/FTLLink.cpp:
+ (JSC::FTL::link):
+ * ftl/FTLLowerDFGToB3.cpp:
+ (JSC::FTL::DFG::LowerDFGToB3::lower):
+ (JSC::FTL::DFG::LowerDFGToB3::compileCallOrConstruct):
+ (JSC::FTL::DFG::LowerDFGToB3::compileDirectCallOrConstruct):
+ (JSC::FTL::DFG::LowerDFGToB3::compileTailCall):
+ (JSC::FTL::DFG::LowerDFGToB3::compileCallOrConstructVarargsSpread):
+ (JSC::FTL::DFG::LowerDFGToB3::compileCallOrConstructVarargs):
+ (JSC::FTL::DFG::LowerDFGToB3::compileCallEval):
+ (JSC::FTL::DFG::LowerDFGToB3::lazySlowPath):
+ * ftl/FTLOSRExitCompiler.cpp:
+ (JSC::FTL::compileStub):
+ (JSC::FTL::compileFTLOSRExit):
+ * ftl/FTLOSRExitHandle.cpp:
+ (JSC::FTL::OSRExitHandle::emitExitThunk):
+ * ftl/FTLOperations.cpp:
+ (JSC::FTL::compileFTLLazySlowPath):
+ * ftl/FTLOutput.h:
+ (JSC::FTL::Output::callWithoutSideEffects):
+ (JSC::FTL::Output::operation):
+ * ftl/FTLPatchpointExceptionHandle.cpp:
+ (JSC::FTL::PatchpointExceptionHandle::scheduleExitCreationForUnwind):
+ * ftl/FTLSlowPathCall.cpp:
+ (JSC::FTL::SlowPathCallContext::makeCall):
+ * ftl/FTLSlowPathCallKey.h:
+ (JSC::FTL::SlowPathCallKey::withCallTarget):
+ (JSC::FTL::SlowPathCallKey::callPtrTag const):
+ * ftl/FTLThunks.cpp:
+ (JSC::FTL::genericGenerationThunkGenerator):
+ (JSC::FTL::osrExitGenerationThunkGenerator):
+ (JSC::FTL::lazySlowPathGenerationThunkGenerator):
+ (JSC::FTL::slowPathCallThunkGenerator):
+ * jit/JITMathIC.h:
+ (JSC::isProfileEmpty):
+ * jit/Repatch.cpp:
+ (JSC::readPutICCallTarget):
+ (JSC::ftlThunkAwareRepatchCall):
+ (JSC::tryCacheGetByID):
+ (JSC::repatchGetByID):
+ (JSC::tryCachePutByID):
+ (JSC::repatchPutByID):
+ (JSC::repatchIn):
+ (JSC::resetGetByID):
+ (JSC::resetPutByID):
+ (JSC::readCallTarget): Deleted.
+ * jit/Repatch.h:
+ * runtime/PtrTag.h:
+
2018-04-08 Yusuke Suzuki <utatane.tea@gmail.com>
Unreviewed, attempt to fix Windows build
Modified: trunk/Source/JavaScriptCore/assembler/CodeLocation.h (230443 => 230444)
--- trunk/Source/JavaScriptCore/assembler/CodeLocation.h 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/assembler/CodeLocation.h 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2009-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2009-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -93,6 +93,8 @@
: CodeLocationCommon(location) {}
explicit CodeLocationLabel(void* location)
: CodeLocationCommon(MacroAssemblerCodePtr(location)) {}
+
+ CodeLocationLabel retagged(PtrTag oldTag, PtrTag newTag) { return CodeLocationLabel(MacroAssemblerCodePtr::retagged(oldTag, newTag)); }
};
class CodeLocationJump : public CodeLocationCommon {
@@ -102,6 +104,8 @@
: CodeLocationCommon(location) {}
explicit CodeLocationJump(void* location)
: CodeLocationCommon(MacroAssemblerCodePtr(location)) {}
+
+ CodeLocationJump retagged(PtrTag oldTag, PtrTag newTag) { return CodeLocationJump(MacroAssemblerCodePtr::retagged(oldTag, newTag)); }
};
class CodeLocationCall : public CodeLocationCommon {
Modified: trunk/Source/JavaScriptCore/assembler/LinkBuffer.h (230443 => 230444)
--- trunk/Source/JavaScriptCore/assembler/LinkBuffer.h 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/assembler/LinkBuffer.h 2018-04-09 17:42:01 UTC (rev 230444)
@@ -186,9 +186,9 @@
call.isFlagSet(Call::Tail) ? NearCallMode::Tail : NearCallMode::Regular);
}
- CodeLocationLabel locationOf(PatchableJump jump)
+ CodeLocationLabel locationOf(PatchableJump jump, PtrTag tag = NoPtrTag)
{
- return CodeLocationLabel(MacroAssembler::getLinkerAddress(code(), applyOffset(jump.m_jump.m_label)));
+ return CodeLocationLabel(MacroAssembler::getLinkerAddress(code(), applyOffset(jump.m_jump.m_label), tag));
}
CodeLocationLabel locationOf(Label label, PtrTag tag = NoPtrTag)
Modified: trunk/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp (230443 => 230444)
--- trunk/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -86,7 +86,8 @@
}
MacroAssemblerCodeRef osrExitThunk = vm()->getCTIStub(osrExitThunkGenerator);
- CodeLocationLabel osrExitThunkLabel = CodeLocationLabel(osrExitThunk.code());
+ PtrTag osrExitThunkTag = ptrTag(DFGOSRExitPtrTag, vm());
+ CodeLocationLabel osrExitThunkLabel = CodeLocationLabel(osrExitThunk.retaggedCode(osrExitThunkTag, NearJumpPtrTag));
for (unsigned i = 0; i < m_jitCode->osrExit.size(); ++i) {
OSRExitCompilationInfo& info = m_exitCompilationInfo[i];
JumpList& failureJumps = info.m_failureJumps;
@@ -320,7 +321,8 @@
}
MacroAssemblerCodeRef osrExitThunk = vm()->getCTIStub(osrExitGenerationThunkGenerator);
- CodeLocationLabel target = CodeLocationLabel(osrExitThunk.code());
+ PtrTag osrExitThunkTag = ptrTag(DFGOSRExitPtrTag, vm());
+ CodeLocationLabel target = CodeLocationLabel(osrExitThunk.retaggedCode(osrExitThunkTag, NearJumpPtrTag));
for (unsigned i = 0; i < m_jitCode->osrExit.size(); ++i) {
OSRExitCompilationInfo& info = m_exitCompilationInfo[i];
if (!Options::useProbeOSRExit()) {
Modified: trunk/Source/JavaScriptCore/ftl/FTLCompile.cpp (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLCompile.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLCompile.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -79,8 +79,8 @@
std::unique_ptr<RegisterAtOffsetList> registerOffsets =
std::make_unique<RegisterAtOffsetList>(state.proc->calleeSaveRegisterAtOffsetList());
if (shouldDumpDisassembly())
- dataLog("Unwind info for ", CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT), ": ", *registerOffsets, "\n");
- state.graph.m_codeBlock->setCalleeSaveRegisters(WTFMove(registerOffsets));
+ dataLog("Unwind info for ", CodeBlockWithJITType(codeBlock, JITCode::FTLJIT), ": ", *registerOffsets, "\n");
+ codeBlock->setCalleeSaveRegisters(WTFMove(registerOffsets));
ASSERT(!(state.proc->frameSize() % sizeof(EncodedJSValue)));
state.jitCode->common.frameRegisterCount = state.proc->frameSize() / sizeof(EncodedJSValue);
@@ -134,11 +134,12 @@
jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm.topEntryFrame);
jit.move(MacroAssembler::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
- CCallHelpers::Call call = jit.call(NoPtrTag);
+ PtrTag callTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ CCallHelpers::Call call = jit.call(callTag);
jit.jumpToExceptionHandler(vm);
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
- linkBuffer.link(call, FunctionPtr(lookupExceptionHandler));
+ linkBuffer.link(call, FunctionPtr(lookupExceptionHandler, callTag));
});
state.finalizer->b3CodeLinkBuffer = std::make_unique<LinkBuffer>(jit, codeBlock, JITCompilationCanFail);
@@ -152,7 +153,8 @@
if (vm.shouldBuilderPCToCodeOriginMapping())
codeBlock->setPCToCodeOriginMap(std::make_unique<PCToCodeOriginMap>(PCToCodeOriginMapBuilder(vm, WTFMove(originMap)), *state.finalizer->b3CodeLinkBuffer));
- CodeLocationLabel label = state.finalizer->b3CodeLinkBuffer->locationOf(state.proc->entrypointLabel(0));
+ PtrTag entryTag = ptrTag(FTLCodePtrTag, codeBlock);
+ CodeLocationLabel label = state.finalizer->b3CodeLinkBuffer->locationOf(state.proc->entrypointLabel(0), entryTag);
state.generatedFunction = label.executableAddress<GeneratedFunction>();
state.jitCode->initializeB3Byproducts(state.proc->releaseByproducts());
@@ -161,7 +163,7 @@
unsigned entrypointIndex = pair.key;
Vector<FlushFormat> argumentFormats = state.graph.m_argumentFormats[entrypointIndex];
state.jitCode->common.appendCatchEntrypoint(
- catchBytecodeOffset, state.finalizer->b3CodeLinkBuffer->locationOf(state.proc->entrypointLabel(entrypointIndex)).executableAddress(), WTFMove(argumentFormats));
+ catchBytecodeOffset, state.finalizer->b3CodeLinkBuffer->locationOf(state.proc->entrypointLabel(entrypointIndex), ExceptionHandlerPtrTag).executableAddress(), WTFMove(argumentFormats));
}
state.jitCode->common.finalizeCatchEntrypoints();
Modified: trunk/Source/JavaScriptCore/ftl/FTLExceptionTarget.cpp (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLExceptionTarget.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLExceptionTarget.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -36,11 +36,11 @@
{
}
-CodeLocationLabel ExceptionTarget::label(LinkBuffer& linkBuffer)
+CodeLocationLabel ExceptionTarget::label(LinkBuffer& linkBuffer, PtrTag handlerTag)
{
if (m_isDefaultHandler)
- return linkBuffer.locationOf(*m_defaultHandler);
- return linkBuffer.locationOf(m_handle->label);
+ return linkBuffer.locationOf(*m_defaultHandler, handlerTag);
+ return linkBuffer.locationOf(m_handle->label, handlerTag);
}
Box<CCallHelpers::JumpList> ExceptionTarget::jumps(CCallHelpers& jit)
@@ -50,13 +50,13 @@
Box<CCallHelpers::Label> defaultHandler = m_defaultHandler;
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
- linkBuffer.link(*result, linkBuffer.locationOf(*defaultHandler));
+ linkBuffer.link(*result, linkBuffer.locationOf(*defaultHandler, ExceptionHandlerPtrTag));
});
} else {
RefPtr<OSRExitHandle> handle = m_handle;
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
- linkBuffer.link(*result, linkBuffer.locationOf(handle->label));
+ linkBuffer.link(*result, linkBuffer.locationOf(handle->label, DFGOSRExitPtrTag));
});
}
return result;
Modified: trunk/Source/JavaScriptCore/ftl/FTLExceptionTarget.h (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLExceptionTarget.h 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLExceptionTarget.h 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -43,7 +43,7 @@
~ExceptionTarget();
// It's OK to call this during linking, but not any sooner.
- CodeLocationLabel label(LinkBuffer&);
+ CodeLocationLabel label(LinkBuffer&, PtrTag handlerTag);
// Or, you can get a JumpList at any time. Anything you add to this JumpList will be linked to
// the target's label.
Modified: trunk/Source/JavaScriptCore/ftl/FTLJITCode.cpp (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLJITCode.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLJITCode.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2013, 2015-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -29,6 +29,7 @@
#if ENABLE(FTL_JIT)
#include "FTLState.h"
+#include "PtrTag.h"
namespace JSC { namespace FTL {
@@ -85,7 +86,12 @@
void* JITCode::executableAddressAtOffset(size_t offset)
{
- return m_addressForCall.executableAddress<char*>() + offset;
+ assertIsTaggedWith(m_addressForCall.executableAddress(), CodeEntryPtrTag);
+ if (!offset)
+ return m_addressForCall.executableAddress();
+
+ char* executableAddress = untagCodePtr<char*>(m_addressForCall.executableAddress(), CodeEntryPtrTag);
+ return tagCodePtr(executableAddress + offset, CodeEntryPtrTag);
}
void* JITCode::dataAddressAtOffset(size_t)
Modified: trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -33,24 +33,24 @@
namespace JSC { namespace FTL {
-LazySlowPath::LazySlowPath(
+LazySlowPath::~LazySlowPath()
+{
+}
+
+void LazySlowPath::initialize(
CodeLocationJump patchableJump, CodeLocationLabel done,
CodeLocationLabel exceptionTarget,
const RegisterSet& usedRegisters, CallSiteIndex callSiteIndex, RefPtr<Generator> generator
)
- : m_patchableJump(patchableJump)
- , m_done(done)
- , m_exceptionTarget(exceptionTarget)
- , m_usedRegisters(usedRegisters)
- , m_callSiteIndex(callSiteIndex)
- , m_generator(generator)
{
+ m_patchableJump = patchableJump;
+ m_done = done;
+ m_exceptionTarget = exceptionTarget;
+ m_usedRegisters = usedRegisters;
+ m_callSiteIndex = callSiteIndex;
+ m_generator = generator;
}
-LazySlowPath::~LazySlowPath()
-{
-}
-
void LazySlowPath::generate(CodeBlock* codeBlock)
{
RELEASE_ASSERT(!m_stub);
@@ -63,13 +63,14 @@
m_generator->run(jit, params);
+ PtrTag slowPathTag = ptrTag(FTLLazySlowPathPtrTag, bitwise_cast<PtrTag>(this));
LinkBuffer linkBuffer(jit, codeBlock, JITCompilationMustSucceed);
- linkBuffer.link(params.doneJumps, m_done);
+ linkBuffer.link(params.doneJumps, m_done.retagged(slowPathTag, NearJumpPtrTag));
if (m_exceptionTarget)
- linkBuffer.link(exceptionJumps, m_exceptionTarget);
- m_stub = FINALIZE_CODE_FOR(codeBlock, linkBuffer, NoPtrTag, "Lazy slow path call stub");
+ linkBuffer.link(exceptionJumps, m_exceptionTarget.retagged(slowPathTag, NearJumpPtrTag));
+ m_stub = FINALIZE_CODE_FOR(codeBlock, linkBuffer, slowPathTag, "Lazy slow path call stub");
- MacroAssembler::repatchJump(m_patchableJump, CodeLocationLabel(m_stub.code()));
+ MacroAssembler::repatchJump(m_patchableJump.retagged(slowPathTag, NearJumpPtrTag), CodeLocationLabel(m_stub.retaggedCode(slowPathTag, NearJumpPtrTag)));
}
} } // namespace JSC::FTL
Modified: trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.h (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.h 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.h 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -63,15 +63,17 @@
{
return createSharedTask<GeneratorFunction>(functor);
}
-
- LazySlowPath(
+
+ LazySlowPath() = default;
+
+ ~LazySlowPath();
+
+ void initialize(
CodeLocationJump patchableJump, CodeLocationLabel done,
CodeLocationLabel exceptionTarget, const RegisterSet& usedRegisters,
CallSiteIndex, RefPtr<Generator>
);
- ~LazySlowPath();
-
CodeLocationJump patchableJump() const { return m_patchableJump; }
CodeLocationLabel done() const { return m_done; }
const RegisterSet& usedRegisters() const { return m_usedRegisters; }
Modified: trunk/Source/JavaScriptCore/ftl/FTLLink.cpp (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLLink.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLLink.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -140,13 +140,15 @@
jit.emitFunctionPrologue();
jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
- CCallHelpers::Call callArityCheck = jit.call(NoPtrTag);
+ PtrTag callTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ CCallHelpers::Call callArityCheck = jit.call(callTag);
auto noException = jit.branch32(CCallHelpers::GreaterThanOrEqual, GPRInfo::returnValueGPR, CCallHelpers::TrustedImm32(0));
jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm.topEntryFrame);
jit.move(CCallHelpers::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
- CCallHelpers::Call callLookupExceptionHandlerFromCallerFrame = jit.call(NoPtrTag);
+ PtrTag lookupTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ CCallHelpers::Call callLookupExceptionHandlerFromCallerFrame = jit.call(lookupTag);
jit.jumpToExceptionHandler(vm);
noException.link(&jit);
@@ -157,10 +159,12 @@
jit.move(GPRInfo::returnValueGPR, GPRInfo::argumentGPR0);
jit.emitFunctionEpilogue();
+ jit.untagReturnAddress();
mainPathJumps.append(jit.branchTest32(CCallHelpers::Zero, GPRInfo::argumentGPR0));
jit.emitFunctionPrologue();
- CCallHelpers::Call callArityFixup = jit.call(NoPtrTag);
+ CCallHelpers::Call callArityFixup = jit.nearCall();
jit.emitFunctionEpilogue();
+ jit.untagReturnAddress();
mainPathJumps.append(jit.jump());
linkBuffer = std::make_unique<LinkBuffer>(jit, codeBlock, JITCompilationCanFail);
@@ -168,13 +172,14 @@
state.allocationFailed = true;
return;
}
- linkBuffer->link(callArityCheck, codeBlock->m_isConstructor ? operationConstructArityCheck : operationCallArityCheck);
- linkBuffer->link(callLookupExceptionHandlerFromCallerFrame, lookupExceptionHandlerFromCallerFrame);
- linkBuffer->link(callArityFixup, FunctionPtr((vm.getCTIStub(arityFixupGenerator)).code()));
+ linkBuffer->link(callArityCheck, FunctionPtr(codeBlock->m_isConstructor ? operationConstructArityCheck : operationCallArityCheck, callTag));
+ linkBuffer->link(callLookupExceptionHandlerFromCallerFrame, FunctionPtr(lookupExceptionHandlerFromCallerFrame, lookupTag));
+ linkBuffer->link(callArityFixup, FunctionPtr(vm.getCTIStub(arityFixupGenerator).retaggedCode(ptrTag(ArityFixupPtrTag, &vm), NearCallPtrTag)));
linkBuffer->link(mainPathJumps, CodeLocationLabel(bitwise_cast<void*>(state.generatedFunction)));
}
- state.jitCode->initializeAddressForCall(MacroAssemblerCodePtr(bitwise_cast<void*>(state.generatedFunction)));
+ PtrTag entryTag = ptrTag(FTLCodePtrTag, codeBlock);
+ state.jitCode->initializeAddressForCall(MacroAssemblerCodePtr(retagCodePtr<void*>(state.generatedFunction, entryTag, CodeEntryPtrTag)));
break;
}
@@ -185,6 +190,7 @@
// call to the B3-generated code.
CCallHelpers::Label start = jit.label();
jit.emitFunctionEpilogue();
+ jit.untagReturnAddress();
CCallHelpers::Jump mainPathJump = jit.jump();
linkBuffer = std::make_unique<LinkBuffer>(jit, codeBlock, JITCompilationCanFail);
@@ -194,7 +200,7 @@
}
linkBuffer->link(mainPathJump, CodeLocationLabel(bitwise_cast<void*>(state.generatedFunction)));
- state.jitCode->initializeAddressForCall(linkBuffer->locationOf(start));
+ state.jitCode->initializeAddressForCall(linkBuffer->locationOf(start, CodeEntryPtrTag));
break;
}
Modified: trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -286,17 +286,19 @@
jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()), GPRInfo::argumentGPR1);
- CCallHelpers::Call throwCall = jit.call(NoPtrTag);
+ PtrTag throwTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ CCallHelpers::Call throwCall = jit.call(throwTag);
jit.move(CCallHelpers::TrustedImmPtr(vm), GPRInfo::argumentGPR0);
jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
- CCallHelpers::Call lookupExceptionHandlerCall = jit.call(NoPtrTag);
+ PtrTag lookupTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ CCallHelpers::Call lookupExceptionHandlerCall = jit.call(lookupTag);
jit.jumpToExceptionHandler(*vm);
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
- linkBuffer.link(throwCall, FunctionPtr(operationThrowStackOverflowError));
- linkBuffer.link(lookupExceptionHandlerCall, FunctionPtr(lookupExceptionHandlerFromCallerFrame));
+ linkBuffer.link(throwCall, FunctionPtr(operationThrowStackOverflowError, throwTag));
+ linkBuffer.link(lookupExceptionHandlerCall, FunctionPtr(lookupExceptionHandlerFromCallerFrame, lookupTag));
});
});
});
@@ -363,7 +365,7 @@
CCallHelpers::Jump jump = jit.jump();
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
- linkBuffer.link(jump, linkBuffer.locationOf(*exceptionHandler));
+ linkBuffer.link(jump, linkBuffer.locationOf(*exceptionHandler, ExceptionHandlerPtrTag));
});
});
m_out.unreachable();
@@ -7145,8 +7147,9 @@
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
+ PtrTag linkTag = ptrTag(LinkCallPtrTag, vm);
MacroAssemblerCodePtr linkCall =
- vm->getCTIStub(linkCallThunkGenerator).code();
+ vm->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCallPtrTag);
linkBuffer.link(slowCall, FunctionPtr(linkCall));
callLinkInfo->setCallLocations(
@@ -7294,7 +7297,7 @@
[=] (LinkBuffer& linkBuffer) {
CodeLocationLabel patchableJumpLocation = linkBuffer.locationOf(patchableJump);
CodeLocationNearCall callLocation = linkBuffer.locationOfNearCall(call);
- CodeLocationLabel slowPathLocation = linkBuffer.locationOf(slowPath);
+ CodeLocationLabel slowPathLocation = linkBuffer.locationOf(slowPath, SlowPathPtrTag);
callLinkInfo->setCallLocations(
patchableJumpLocation,
@@ -7342,7 +7345,7 @@
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
CodeLocationNearCall callLocation = linkBuffer.locationOfNearCall(call);
- CodeLocationLabel slowPathLocation = linkBuffer.locationOf(slowPath);
+ CodeLocationLabel slowPathLocation = linkBuffer.locationOf(slowPath, NearCallPtrTag);
linkBuffer.link(call, slowPathLocation);
@@ -7466,8 +7469,9 @@
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
+ PtrTag linkTag = ptrTag(LinkCallPtrTag, vm);
MacroAssemblerCodePtr linkCall =
- vm->getCTIStub(linkCallThunkGenerator).code();
+ vm->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCallPtrTag);
linkBuffer.link(slowCall, FunctionPtr(linkCall));
callLinkInfo->setCallLocations(
@@ -7610,8 +7614,9 @@
};
auto callWithExceptionCheck = [&] (void* callee) {
- jit.move(CCallHelpers::TrustedImmPtr(callee), GPRInfo::nonPreservedNonArgumentGPR0);
- jit.call(GPRInfo::nonPreservedNonArgumentGPR0, NoPtrTag);
+ PtrTag tag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(callee, tag)), GPRInfo::nonPreservedNonArgumentGPR0);
+ jit.call(GPRInfo::nonPreservedNonArgumentGPR0, tag);
exceptions->append(jit.emitExceptionCheck(*vm, AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
};
@@ -7765,8 +7770,9 @@
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
+ PtrTag linkTag = ptrTag(LinkCallPtrTag, vm);
MacroAssemblerCodePtr linkCall =
- vm->getCTIStub(linkCallThunkGenerator).code();
+ vm->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCallPtrTag);
linkBuffer.link(slowCall, FunctionPtr(linkCall));
callLinkInfo->setCallLocations(
@@ -7949,8 +7955,9 @@
RELEASE_ASSERT(!allocator.numberOfReusedRegisters());
auto callWithExceptionCheck = [&] (void* callee) {
- jit.move(CCallHelpers::TrustedImmPtr(callee), GPRInfo::nonPreservedNonArgumentGPR0);
- jit.call(GPRInfo::nonPreservedNonArgumentGPR0, NoPtrTag);
+ PtrTag tag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(callee, tag)), GPRInfo::nonPreservedNonArgumentGPR0);
+ jit.call(GPRInfo::nonPreservedNonArgumentGPR0, tag);
exceptions->append(jit.emitExceptionCheck(*vm, AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
};
@@ -8048,8 +8055,9 @@
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
+ PtrTag linkTag = ptrTag(LinkCallPtrTag, vm);
MacroAssemblerCodePtr linkCall =
- vm->getCTIStub(linkCallThunkGenerator).code();
+ vm->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCallPtrTag);
linkBuffer.link(slowCall, FunctionPtr(linkCall));
callLinkInfo->setCallLocations(
@@ -8137,8 +8145,9 @@
requiredBytes = WTF::roundUpToMultipleOf(stackAlignmentBytes(), requiredBytes);
jit.subPtr(CCallHelpers::TrustedImm32(requiredBytes), CCallHelpers::stackPointerRegister);
jit.setupArguments<decltype(operationCallEval)>(GPRInfo::regT1);
- jit.move(CCallHelpers::TrustedImmPtr(bitwise_cast<void*>(operationCallEval)), GPRInfo::nonPreservedNonArgumentGPR0);
- jit.call(GPRInfo::nonPreservedNonArgumentGPR0, NoPtrTag);
+ PtrTag tag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(operationCallEval, tag)), GPRInfo::nonPreservedNonArgumentGPR0);
+ jit.call(GPRInfo::nonPreservedNonArgumentGPR0, tag);
exceptions->append(jit.emitExceptionCheck(state->vm(), AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
CCallHelpers::Jump done = jit.branchTest64(CCallHelpers::NonZero, GPRInfo::returnValueGPR);
@@ -13957,22 +13966,26 @@
jit.addLinkTask(
[=] (LinkBuffer& linkBuffer) {
+ PtrTag thunkTag = ptrTag(FTLLazySlowPathPtrTag, vm);
linkBuffer.link(
generatorJump, CodeLocationLabel(
vm->getCTIStub(
- lazySlowPathGenerationThunkGenerator).code()));
+ lazySlowPathGenerationThunkGenerator).retaggedCode(thunkTag, NearJumpPtrTag)));
+ std::unique_ptr<LazySlowPath> lazySlowPath = std::make_unique<LazySlowPath>();
+
+ PtrTag slowPathTag = ptrTag(FTLLazySlowPathPtrTag, bitwise_cast<PtrTag>(lazySlowPath.get()));
CodeLocationJump linkedPatchableJump = CodeLocationJump(
- linkBuffer.locationOf(patchableJump));
- CodeLocationLabel linkedDone = linkBuffer.locationOf(done);
+ linkBuffer.locationOf(patchableJump, slowPathTag));
+ CodeLocationLabel linkedDone = linkBuffer.locationOf(done, slowPathTag);
+
CallSiteIndex callSiteIndex =
jitCode->common.addUniqueCallSiteIndex(origin);
- std::unique_ptr<LazySlowPath> lazySlowPath =
- std::make_unique<LazySlowPath>(
+ lazySlowPath->initialize(
linkedPatchableJump, linkedDone,
- exceptionTarget->label(linkBuffer), usedRegisters,
+ exceptionTarget->label(linkBuffer, slowPathTag), usedRegisters,
callSiteIndex, generator);
jitCode->lazySlowPaths[index] = WTFMove(lazySlowPath);
Modified: trunk/Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -176,7 +176,7 @@
}
static void compileStub(
- unsigned exitID, JITCode* jitCode, OSRExit& exit, VM* vm, CodeBlock* codeBlock)
+ unsigned exitID, JITCode* jitCode, OSRExit& exit, VM* vm, CodeBlock* codeBlock, PtrTag exitSiteTag)
{
// This code requires framePointerRegister is the same as callFrameRegister
static_assert(MacroAssembler::framePointerRegister == GPRInfo::callFrameRegister, "MacroAssembler::framePointerRegister and GPRInfo::callFrameRegister must be the same");
@@ -338,8 +338,9 @@
jit.setupArguments<decltype(operationMaterializeObjectInOSR)>(
CCallHelpers::TrustedImmPtr(materialization),
CCallHelpers::TrustedImmPtr(materializationArguments));
- jit.move(CCallHelpers::TrustedImmPtr(bitwise_cast<void*>(operationMaterializeObjectInOSR)), GPRInfo::nonArgGPR0);
- jit.call(GPRInfo::nonArgGPR0, NoPtrTag);
+ PtrTag tag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(operationMaterializeObjectInOSR, tag)), GPRInfo::nonArgGPR0);
+ jit.call(GPRInfo::nonArgGPR0, tag);
jit.storePtr(GPRInfo::returnValueGPR, materializationToPointer.get(materialization));
// Let everyone know that we're done.
@@ -366,8 +367,9 @@
CCallHelpers::TrustedImmPtr(materialization),
CCallHelpers::TrustedImmPtr(materializationToPointer.get(materialization)),
CCallHelpers::TrustedImmPtr(materializationArguments));
- jit.move(CCallHelpers::TrustedImmPtr(bitwise_cast<void*>(operationPopulateObjectInOSR)), GPRInfo::nonArgGPR0);
- jit.call(GPRInfo::nonArgGPR0, NoPtrTag);
+ PtrTag tag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(operationPopulateObjectInOSR, tag)), GPRInfo::nonArgGPR0);
+ jit.call(GPRInfo::nonArgGPR0, tag);
}
// Save all state from wherever the exit data tells us it was, into the appropriate place in
@@ -494,7 +496,7 @@
LinkBuffer patchBuffer(jit, codeBlock);
exit.m_code = FINALIZE_CODE_IF(
shouldDumpDisassembly() || Options::verboseOSR() || Options::verboseFTLOSRExit(),
- patchBuffer, NoPtrTag,
+ patchBuffer, exitSiteTag,
"FTL OSR exit #%u (%s, %s) from %s, with operands = %s",
exitID, toCString(exit.m_codeOrigin).data(),
exitKindToString(exit.m_kind), toCString(*codeBlock).data(),
@@ -542,12 +544,13 @@
prepareCodeOriginForOSRExit(exec, exit.m_codeOrigin);
- compileStub(exitID, jitCode, exit, &vm, codeBlock);
+ PtrTag thunkTag = ptrTag(FTLOSRExitPtrTag, &exit);
+ compileStub(exitID, jitCode, exit, &vm, codeBlock, thunkTag);
MacroAssembler::repatchJump(
- exit.codeLocationForRepatch(codeBlock), CodeLocationLabel(exit.m_code.code()));
+ exit.codeLocationForRepatch(codeBlock), CodeLocationLabel(exit.m_code.retaggedCode(thunkTag, NearJumpPtrTag)));
- return exit.m_code.code().executableAddress();
+ return exit.m_code.retaggedCode(thunkTag, bitwise_cast<PtrTag>(exec)).executableAddress();
}
} } // namespace JSC::FTL
Modified: trunk/Source/JavaScriptCore/ftl/FTLOSRExitHandle.cpp (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLOSRExitHandle.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLOSRExitHandle.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -49,9 +49,10 @@
[self, jump, myLabel, compilation, &vm] (LinkBuffer& linkBuffer) {
self->exit.m_patchableJump = CodeLocationJump(linkBuffer.locationOf(jump));
+ PtrTag thunkTag = ptrTag(FTLOSRExitPtrTag, &vm);
linkBuffer.link(
jump.m_jump,
- CodeLocationLabel(vm.getCTIStub(osrExitGenerationThunkGenerator).code()));
+ CodeLocationLabel(vm.getCTIStub(osrExitGenerationThunkGenerator).retaggedCode(thunkTag, NearJumpPtrTag)));
if (compilation)
compilation->addOSRExitSite({ linkBuffer.locationOf(myLabel).executableAddress() });
});
Modified: trunk/Source/JavaScriptCore/ftl/FTLOperations.cpp (230443 => 230444)
--- trunk/Source/JavaScriptCore/ftl/FTLOperations.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/JavaScriptCore/ftl/FTLOperations.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2014-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2014-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -581,7 +581,8 @@
LazySlowPath& lazySlowPath = *jitCode->lazySlowPaths[index];
lazySlowPath.generate(codeBlock);
- return lazySlowPath.stub().code().executableAddress();
+ PtrTag slowPathTag = ptrTag(FTLLazySlowPathPtrTag, bitwise_cast<PtrTag>(&lazySlowPath));
+ return lazySlowPath.stub().retaggedCode(slowPathTag, bitwise_cast<PtrTag>(exec)).executableAddress();
}
} } // namespace JSC::FTL
Modified: trunk/Source/_javascript_Core/ftl/FTLOutput.h (230443 => 230444)
--- trunk/Source/_javascript_Core/ftl/FTLOutput.h 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/_javascript_Core/ftl/FTLOutput.h 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -401,11 +401,13 @@
LValue callWithoutSideEffects(B3::Type type, Function function, LValue arg1, Args... args)
{
return m_block->appendNew<B3::CCallValue>(m_proc, type, origin(), B3::Effects::none(),
- constIntPtr(bitwise_cast<void*>(function)), arg1, args...);
+ constIntPtr(tagCFunctionPtr<void*>(function, B3CCallPtrTag)), arg1, args...);
}
+ // FIXME: Consider enhancing this to allow the client to choose the target PtrTag to use.
+ // https://bugs.webkit.org/show_bug.cgi?id=184324
template<typename FunctionType>
- LValue operation(FunctionType function) { return constIntPtr(bitwise_cast<void*>(function)); }
+ LValue operation(FunctionType function) { return constIntPtr(tagCFunctionPtr<void*>(function, B3CCallPtrTag)); }
void jump(LBasicBlock);
void branch(LValue condition, LBasicBlock taken, Weight takenWeight, LBasicBlock notTaken, Weight notTakenWeight);
Modified: trunk/Source/_javascript_Core/ftl/FTLPatchpointExceptionHandle.cpp (230443 => 230444)
--- trunk/Source/_javascript_Core/ftl/FTLPatchpointExceptionHandle.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/_javascript_Core/ftl/FTLPatchpointExceptionHandle.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -91,7 +91,7 @@
HandlerInfo newHandler = handler;
newHandler.start = callSiteIndex.bits();
newHandler.end = callSiteIndex.bits() + 1;
- newHandler.nativeCode = linkBuffer.locationOf(handle->label);
+ newHandler.nativeCode = linkBuffer.locationOf(handle->label, ExceptionHandlerPtrTag);
codeBlock->appendExceptionHandler(newHandler);
});
});
Modified: trunk/Source/_javascript_Core/ftl/FTLSlowPathCall.cpp (230443 => 230444)
--- trunk/Source/_javascript_Core/ftl/FTLSlowPathCall.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/_javascript_Core/ftl/FTLSlowPathCall.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -122,7 +122,9 @@
{
void* executableAddress = callTarget.executableAddress();
assertIsCFunctionPtr(executableAddress);
- SlowPathCall result = SlowPathCall(m_jit.call(NoPtrTag), keyWithTarget(executableAddress));
+ SlowPathCallKey key = keyWithTarget(executableAddress);
+ PtrTag callTag = key.callPtrTag();
+ SlowPathCall result = SlowPathCall(m_jit.call(callTag), key);
m_jit.addLinkTask(
[result, &vm] (LinkBuffer& linkBuffer) {
Modified: trunk/Source/_javascript_Core/ftl/FTLSlowPathCallKey.h (230443 => 230444)
--- trunk/Source/_javascript_Core/ftl/FTLSlowPathCallKey.h 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/_javascript_Core/ftl/FTLSlowPathCallKey.h 2018-04-09 17:42:01 UTC (rev 230444)
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -27,6 +27,7 @@
#if ENABLE(FTL_JIT)
+#include "PtrTag.h"
#include "RegisterSet.h"
namespace JSC { namespace FTL {
@@ -66,6 +67,7 @@
SlowPathCallKey withCallTarget(void* callTarget)
{
+ assertIsTaggedWith(callTarget, CFunctionPtrTag);
return SlowPathCallKey(usedRegisters(), callTarget, argumentRegisters(), offset());
}
@@ -102,6 +104,13 @@
return m_usedRegisters.hash() + PtrHash<void*>::hash(m_callTarget) + m_offset;
}
+ PtrTag callPtrTag() const
+ {
+ // We should only include factors which are invariant for the same slow path site.
+ // m_callTarget can vary and should be excluded.
+ return ptrTag(FTLSlowPathPtrTag, m_usedRegisters.hash(), m_offset);
+ }
+
private:
RegisterSet m_usedRegisters;
void* m_callTarget;
Modified: trunk/Source/_javascript_Core/ftl/FTLThunks.cpp (230443 => 230444)
--- trunk/Source/_javascript_Core/ftl/FTLThunks.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/_javascript_Core/ftl/FTLThunks.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -47,7 +47,7 @@
};
static MacroAssemblerCodeRef genericGenerationThunkGenerator(
- VM* vm, FunctionPtr generationFunction, const char* name, unsigned extraPopsToRestore, FrameAndStackAdjustmentRequirement frameAndStackAdjustmentRequirement)
+ VM* vm, FunctionPtr generationFunction, PtrTag resultThunkTag, const char* name, unsigned extraPopsToRestore, FrameAndStackAdjustmentRequirement frameAndStackAdjustmentRequirement)
{
AssemblyHelpers jit(nullptr);
@@ -86,7 +86,8 @@
jit.peek(
GPRInfo::argumentGPR1,
(stackMisalignment - MacroAssembler::pushToSaveByteOffset()) / sizeof(void*));
- MacroAssembler::Call functionCall = jit.call(NoPtrTag);
+ PtrTag generatorCallTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
+ MacroAssembler::Call functionCall = jit.call(generatorCallTag);
// At this point we want to make a tail call to what was returned to us in the
// returnValueGPR. But at the same time as we do this, we must restore all registers.
@@ -115,27 +116,31 @@
restoreAllRegisters(jit, buffer);
+#if CPU(ARM64) && USE(POINTER_PROFILING)
+ jit.untagPtr(AssemblyHelpers::linkRegister, GPRInfo::callFrameRegister);
+ jit.tagReturnAddress();
+#endif
jit.ret();
LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
- patchBuffer.link(functionCall, generationFunction);
- return FINALIZE_CODE(patchBuffer, NoPtrTag, "%s", name);
+ patchBuffer.link(functionCall, FunctionPtr(generationFunction, generatorCallTag));
+ return FINALIZE_CODE(patchBuffer, resultThunkTag, "%s", name);
}
MacroAssemblerCodeRef osrExitGenerationThunkGenerator(VM* vm)
{
unsigned extraPopsToRestore = 0;
- PtrTag tag = ptrTag(JITThunkPtrTag, nextPtrTagID());
+ PtrTag thunkTag = ptrTag(FTLOSRExitPtrTag, vm);
return genericGenerationThunkGenerator(
- vm, FunctionPtr(compileFTLOSRExit, tag), "FTL OSR exit generation thunk", extraPopsToRestore, FrameAndStackAdjustmentRequirement::Needed);
+ vm, FunctionPtr(compileFTLOSRExit), thunkTag, "FTL OSR exit generation thunk", extraPopsToRestore, FrameAndStackAdjustmentRequirement::Needed);
}
MacroAssemblerCodeRef lazySlowPathGenerationThunkGenerator(VM* vm)
{
unsigned extraPopsToRestore = 1;
- PtrTag tag = ptrTag(JITThunkPtrTag, nextPtrTagID());
+ PtrTag thunkTag = ptrTag(FTLLazySlowPathPtrTag, vm);
return genericGenerationThunkGenerator(
- vm, FunctionPtr(compileFTLLazySlowPath, tag), "FTL lazy slow path generation thunk", extraPopsToRestore, FrameAndStackAdjustmentRequirement::NotNeeded);
+ vm, FunctionPtr(compileFTLLazySlowPath), thunkTag, "FTL lazy slow path generation thunk", extraPopsToRestore, FrameAndStackAdjustmentRequirement::NotNeeded);
}
static void registerClobberCheck(AssemblyHelpers& jit, RegisterSet dontClobber)
@@ -169,7 +174,8 @@
MacroAssemblerCodeRef slowPathCallThunkGenerator(const SlowPathCallKey& key)
{
AssemblyHelpers jit(nullptr);
-
+ jit.tagReturnAddress();
+
// We want to save the given registers at the given offset, then we want to save the
// old return address somewhere past that offset, and then finally we want to make the
// call.
@@ -199,7 +205,7 @@
registerClobberCheck(jit, key.argumentRegisters());
- PtrTag callTag = ptrTag(JITThunkPtrTag, nextPtrTagID());
+ PtrTag callTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
AssemblyHelpers::Call call = jit.call(callTag);
jit.loadPtr(AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, key.offset()), GPRInfo::nonPreservedNonReturnGPR);
@@ -227,7 +233,7 @@
LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
patchBuffer.link(call, FunctionPtr(key.callTarget(), callTag));
- return FINALIZE_CODE(patchBuffer, NoPtrTag, "FTL slow path call thunk for %s", toCString(key).data());
+ return FINALIZE_CODE(patchBuffer, key.callPtrTag(), "FTL slow path call thunk for %s", toCString(key).data());
}
} } // namespace JSC::FTL
Modified: trunk/Source/_javascript_Core/jit/JITMathIC.h (230443 => 230444)
--- trunk/Source/_javascript_Core/jit/JITMathIC.h 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/_javascript_Core/jit/JITMathIC.h 2018-04-09 17:42:01 UTC (rev 230444)
@@ -144,8 +144,8 @@
};
auto replaceCall = [&] () {
- PtrTag tag = ptrTag(MathICPtrTag, m_instruction);
- ftlThunkAwareRepatchCall(codeBlock, slowPathCallLocation(), FunctionPtr(callReplacement, tag));
+ PtrTag callTag = ptrTag(MathICPtrTag, m_instruction);
+ ftlThunkAwareRepatchCall(codeBlock, slowPathCallLocation(), callReplacement, callTag);
};
bool shouldEmitProfiling = !JITCode::isOptimizingJIT(codeBlock->jitType());
@@ -228,7 +228,7 @@
m_deltaFromStartToSlowPathCallLocation = MacroAssembler::differenceBetweenCodePtr(
start, linkBuffer.locationOf(state.slowPathCall));
m_deltaFromStartToSlowPathStart = MacroAssembler::differenceBetweenCodePtr(
- start, linkBuffer.locationOf(state.slowPathStart, NoPtrTag));
+ start, linkBuffer.locationOf(state.slowPathStart));
}
ArithProfile* arithProfile() const { return m_arithProfile; }
Modified: trunk/Source/_javascript_Core/jit/Repatch.cpp (230443 => 230444)
--- trunk/Source/_javascript_Core/jit/Repatch.cpp 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/_javascript_Core/jit/Repatch.cpp 2018-04-09 17:42:01 UTC (rev 230444)
@@ -66,37 +66,40 @@
namespace JSC {
-static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
+static FunctionPtr readPutICCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
- FunctionPtr result = MacroAssembler::readCallTarget(call);
+ FunctionPtr target = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
if (codeBlock->jitType() == JITCode::FTLJIT) {
- return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
- MacroAssemblerCodePtr::createFromExecutableAddress(
- result.executableAddress())).callTarget(), CodeEntryPtrTag);
+ MacroAssemblerCodePtr slowPathThunk = MacroAssemblerCodePtr::createFromExecutableAddress(target.executableAddress());
+ auto* callTarget = codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(slowPathThunk).callTarget();
+ return FunctionPtr(callTarget, CFunctionPtrTag);
}
#else
UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
- return result;
+ return FunctionPtr(untagCFunctionPtr(target.executableAddress(), PutPropertyPtrTag), CFunctionPtrTag);
}
-void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
+void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction, PtrTag callTag)
{
#if ENABLE(FTL_JIT)
if (codeBlock->jitType() == JITCode::FTLJIT) {
VM& vm = *codeBlock->vm();
FTL::Thunks& thunks = *vm.ftlThunks;
- FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
- MacroAssemblerCodePtr::createFromExecutableAddress(
- MacroAssembler::readCallTarget(call).executableAddress()));
+ FunctionPtr target = MacroAssembler::readCallTarget(call);
+ MacroAssemblerCodePtr slowPathThunk = MacroAssemblerCodePtr::createFromExecutableAddress(target.executableAddress());
+ FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(slowPathThunk);
key = key.withCallTarget(newCalleeFunction.executableAddress());
newCalleeFunction = FunctionPtr(thunks.getSlowPathCallThunk(key).code());
+ assertIsTaggedWith(newCalleeFunction.executableAddress(), key.callPtrTag());
+ MacroAssembler::repatchCall(call, newCalleeFunction);
+ return;
}
#else // ENABLE(FTL_JIT)
UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
- MacroAssembler::repatchCall(call, newCalleeFunction);
+ MacroAssembler::repatchCall(call, FunctionPtr(newCalleeFunction, callTag));
}
enum InlineCacheAction {
@@ -207,7 +210,7 @@
bool generatedCodeInline = InlineAccess::generateArrayLength(stubInfo, jsCast<JSArray*>(baseCell));
if (generatedCodeInline) {
- ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), FunctionPtr(appropriateOptimizingGetByIdFunction(kind), GetPropertyPtrTag));
+ ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind), GetPropertyPtrTag);
stubInfo.initArrayLength();
return RetryCacheLater;
}
@@ -264,7 +267,7 @@
if (generatedCodeInline) {
LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
- ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), FunctionPtr(appropriateOptimizingGetByIdFunction(kind), GetPropertyPtrTag));
+ ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind), GetPropertyPtrTag);
stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
return RetryCacheLater;
}
@@ -388,7 +391,7 @@
if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache) {
CodeBlock* codeBlock = exec->codeBlock();
- ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), FunctionPtr(appropriateGetByIdFunction(kind), GetPropertyPtrTag));
+ ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGetByIdFunction(kind), GetPropertyPtrTag);
}
}
@@ -460,7 +463,7 @@
bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(stubInfo, structure, slot.cachedOffset());
if (generatedCodeInline) {
LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
- ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), FunctionPtr(appropriateOptimizingPutByIdFunction(slot, putKind), PutPropertyPtrTag));
+ ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind), PutPropertyPtrTag);
stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
return RetryCacheLater;
}
@@ -594,7 +597,7 @@
if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache) {
CodeBlock* codeBlock = exec->codeBlock();
- ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), FunctionPtr(appropriateGenericPutByIdFunction(slot, putKind), PutPropertyPtrTag));
+ ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind), PutPropertyPtrTag);
}
}
@@ -683,7 +686,7 @@
{
SuperSamplerScope superSamplerScope(false);
if (tryCacheIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
- ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn);
+ ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn, CFunctionPtrTag);
}
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef, PtrTag linkTag)
@@ -1136,13 +1139,13 @@
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
- ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), FunctionPtr(appropriateOptimizingGetByIdFunction(kind), GetPropertyPtrTag));
+ ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind), GetPropertyPtrTag);
InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
}
void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
- V_JITOperation_ESsiJJI unoptimizedFunction = untagCFunctionPtr<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress(), PutPropertyPtrTag);
+ V_JITOperation_ESsiJJI unoptimizedFunction = reinterpret_cast<V_JITOperation_ESsiJJI>(readPutICCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
V_JITOperation_ESsiJJI optimizedFunction;
if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
optimizedFunction = operationPutByIdStrictOptimize;
@@ -1155,7 +1158,7 @@
optimizedFunction = operationPutByIdDirectNonStrictOptimize;
}
- ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), FunctionPtr(optimizedFunction, PutPropertyPtrTag));
+ ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction, PutPropertyPtrTag);
InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
}
Modified: trunk/Source/_javascript_Core/jit/Repatch.h (230443 => 230444)
--- trunk/Source/_javascript_Core/jit/Repatch.h 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/_javascript_Core/jit/Repatch.h 2018-04-09 17:42:01 UTC (rev 230444)
@@ -54,7 +54,7 @@
void resetGetByID(CodeBlock*, StructureStubInfo&, GetByIDKind);
void resetPutByID(CodeBlock*, StructureStubInfo&);
void resetIn(CodeBlock*, StructureStubInfo&);
-void ftlThunkAwareRepatchCall(CodeBlock*, CodeLocationCall, FunctionPtr newCalleeFunction);
+void ftlThunkAwareRepatchCall(CodeBlock*, CodeLocationCall, FunctionPtr newCalleeFunction, PtrTag callTag);
} // namespace JSC
Modified: trunk/Source/_javascript_Core/runtime/PtrTag.h (230443 => 230444)
--- trunk/Source/_javascript_Core/runtime/PtrTag.h 2018-04-09 17:09:44 UTC (rev 230443)
+++ trunk/Source/_javascript_Core/runtime/PtrTag.h 2018-04-09 17:42:01 UTC (rev 230444)
@@ -45,6 +45,11 @@
v(DFGOSRExitPtrTag) \
v(DFGOperationPtrTag) \
v(ExceptionHandlerPtrTag) \
+ v(FTLCodePtrTag) \
+ v(FTLLazySlowPathPtrTag) \
+ v(FTLOSRExitPtrTag) \
+ v(FTLOperationPtrTag) \
+ v(FTLSlowPathPtrTag) \
v(GetPropertyPtrTag) \
v(GetterSetterPtrTag) \
v(HasPropertyPtrTag) \
@@ -66,13 +71,11 @@
v(SpecializedThunkPtrTag) \
v(SwitchTablePtrTag) \
v(ThrowExceptionPtrTag) \
- \
v(Yarr8BitPtrTag) \
v(Yarr16BitPtrTag) \
v(YarrMatchOnly8BitPtrTag) \
v(YarrMatchOnly16BitPtrTag) \
v(YarrBacktrackPtrTag) \
- \
v(WasmCallPtrTag) \
v(WasmHelperPtrTag) \