sgraenitz updated this revision to Diff 447667.
sgraenitz marked 2 inline comments as done.
sgraenitz added a comment.

Rebase and polish comments


Repository:
  rG LLVM Github Monorepo

CHANGES SINCE LAST ACTION
  https://reviews.llvm.org/D128190/new/

https://reviews.llvm.org/D128190

Files:
  clang/lib/CodeGen/CGCall.cpp
  clang/test/CodeGenObjCXX/arc-exceptions-seh.mm
  llvm/include/llvm/IR/IntrinsicInst.h
  llvm/lib/CodeGen/PreISelIntrinsicLowering.cpp
  llvm/lib/IR/IntrinsicInst.cpp
  llvm/lib/Transforms/Utils/InlineFunction.cpp
  llvm/test/CodeGen/X86/win64-funclet-preisel-intrinsics.ll
  llvm/test/Feature/OperandBundles/inliner-funclet-wineh.ll

Index: llvm/test/Feature/OperandBundles/inliner-funclet-wineh.ll
===================================================================
--- /dev/null
+++ llvm/test/Feature/OperandBundles/inliner-funclet-wineh.ll
@@ -0,0 +1,51 @@
+; RUN: opt -S -always-inline -mtriple=x86_64-windows-msvc < %s | FileCheck %s
+
+; WinEH requires funclet tokens on nounwind intrinsics if they can lower to
+; regular function calls in the course of IR transformations.
+;
+; Test that the inliner propagates funclet tokens to such intrinsics when
+; inlining into EH funclets, i.e.: llvm.objc.storeStrong inherits the "funclet"
+; token from inlined_fn.
+
+define void @inlined_fn(ptr %ex) #1 {
+entry:
+  call void @llvm.objc.storeStrong(ptr %ex, ptr null)
+  ret void
+}
+
+define void @test_catch_with_inline() personality ptr @__CxxFrameHandler3 {
+entry:
+  %exn.slot = alloca ptr, align 8
+  %ex = alloca ptr, align 8
+  invoke void @opaque() to label %invoke.cont unwind label %catch.dispatch
+
+catch.dispatch:
+  %0 = catchswitch within none [label %catch] unwind to caller
+
+invoke.cont:
+  unreachable
+
+catch:
+  %1 = catchpad within %0 [ptr null, i32 64, ptr %exn.slot]
+  call void @inlined_fn(ptr %ex) [ "funclet"(token %1) ]
+  catchret from %1 to label %catchret.dest
+
+catchret.dest:
+  ret void
+}
+
+declare void @opaque()
+declare void @llvm.objc.storeStrong(ptr, ptr) #0
+declare i32 @__CxxFrameHandler3(...)
+
+attributes #0 = { nounwind }
+attributes #1 = { alwaysinline }
+
+; After inlining, llvm.objc.storeStrong inherited the "funclet" token:
+;
+;   CHECK-LABEL:  define void @test_catch_with_inline()
+;                   ...
+;   CHECK:        catch:
+;   CHECK-NEXT:     %1 = catchpad within %0 [ptr null, i32 64, ptr %exn.slot]
+;   CHECK-NEXT:     call void @llvm.objc.storeStrong(ptr %ex, ptr null) [ "funclet"(token %1) ]
+;   CHECK-NEXT:     catchret from %1 to label %catchret.dest
Index: llvm/test/CodeGen/X86/win64-funclet-preisel-intrinsics.ll
===================================================================
--- /dev/null
+++ llvm/test/CodeGen/X86/win64-funclet-preisel-intrinsics.ll
@@ -0,0 +1,77 @@
+; RUN: llc -mtriple=x86_64-windows-msvc < %s | FileCheck %s
+
+; WinEH requires funclet tokens on nounwind intrinsics if they can lower to
+; regular function calls in the course of IR transformations.
+;
+; Test that the code generator will emit the function call and not consider it
+; an "implausible instruciton". In the past this silently truncated code on
+; exception paths and caused crashes at runtime.
+;
+; Reduced IR generated from ObjC++ source:
+;
+;     @class Ety;
+;     void opaque(void);
+;     void test_catch_with_objc_intrinsic(void) {
+;       @try {
+;         opaque();
+;       } @catch (Ety *ex) {
+;         // Destroy ex when leaving catchpad. This would emit calls to two
+;         // intrinsic functions: llvm.objc.retain and llvm.objc.storeStrong
+;       }
+;     }
+;
+; llvm.objc.retain and llvm.objc.storeStrong both lower into regular function
+; calls before ISel. We only need one of them to trigger funclet truncation
+; during codegen:
+
+define void @test_catch_with_objc_intrinsic() personality ptr @__CxxFrameHandler3 {
+entry:
+  %exn.slot = alloca ptr, align 8
+  %ex2 = alloca ptr, align 8
+  invoke void @opaque() to label %invoke.cont unwind label %catch.dispatch
+
+catch.dispatch:
+  %0 = catchswitch within none [label %catch] unwind to caller
+
+invoke.cont:
+  unreachable
+
+catch:
+  %1 = catchpad within %0 [ptr null, i32 64, ptr %exn.slot]
+  call void @llvm.objc.storeStrong(ptr %ex2, ptr null) [ "funclet"(token %1) ]
+  catchret from %1 to label %catchret.dest
+
+catchret.dest:
+  ret void
+}
+
+declare void @opaque()
+declare void @llvm.objc.storeStrong(ptr, ptr) #0
+declare i32 @__CxxFrameHandler3(...)
+
+attributes #0 = { nounwind }
+
+; EH catchpad with SEH prologue:
+;     CHECK: # %catch
+;     CHECK: pushq   %rbp
+;     CHECK: .seh_pushreg %rbp
+;            ...
+;     CHECK: .seh_endprologue
+;
+; At this point the code used to be truncated (and sometimes terminated with an
+; int3 opcode):
+;     CHECK-NOT: int3
+;
+; Instead, the call to objc_storeStrong should be emitted:
+;     CHECK: leaq	-24(%rbp), %rcx
+;     CHECK: xorl	%edx, %edx
+;     CHECK: callq	objc_storeStrong
+;        ...
+;
+; This is the end of the funclet:
+;     CHECK: popq	%rbp
+;     CHECK: retq                                    # CATCHRET
+;            ...
+;     CHECK: .seh_handlerdata
+;            ...
+;     CHECK: .seh_endproc
Index: llvm/lib/Transforms/Utils/InlineFunction.cpp
===================================================================
--- llvm/lib/Transforms/Utils/InlineFunction.cpp
+++ llvm/lib/Transforms/Utils/InlineFunction.cpp
@@ -825,6 +825,35 @@
   }
 }
 
+/// Propagate the call site's "funclet" bundle to calls in the inlined blocks.
+static void PropagateOperandBundles(Function::iterator InlinedBB,
+                                    Instruction *CallSiteEHPad) {
+  for (Instruction &II : llvm::make_early_inc_range(*InlinedBB)) {
+    CallBase *I = dyn_cast<CallBase>(&II);
+    if (!I)
+      continue;
+    // Skip call sites which already have a "funclet" bundle.
+    if (I->getOperandBundle(LLVMContext::OB_funclet))
+      continue;
+    // Skip call sites which are nounwind intrinsics (as long as they don't
+    // lower into regular function calls in the course of IR transformations).
+    auto *CalledFn =
+        dyn_cast<Function>(I->getCalledOperand()->stripPointerCasts());
+    if (CalledFn && CalledFn->isIntrinsic() && I->doesNotThrow() &&
+        !IntrinsicInst::mayLowerToFunctionCall(CalledFn->getIntrinsicID()))
+      continue;
+
+    SmallVector<OperandBundleDef, 1> OpBundles;
+    I->getOperandBundlesAsDefs(OpBundles);
+    OpBundles.emplace_back("funclet", CallSiteEHPad);
+
+    Instruction *NewInst = CallBase::Create(I, OpBundles, I);
+    NewInst->takeName(I);
+    I->replaceAllUsesWith(NewInst);
+    I->eraseFromParent();
+  }
+}
+
 namespace {
 /// Utility for cloning !noalias and !alias.scope metadata. When a code region
 /// using scoped alias metadata is inlined, the aliasing relationships may not
@@ -2302,38 +2331,12 @@
   // Update the lexical scopes of the new funclets and callsites.
   // Anything that had 'none' as its parent is now nested inside the callsite's
   // EHPad.
-
   if (CallSiteEHPad) {
     for (Function::iterator BB = FirstNewBlock->getIterator(),
                             E = Caller->end();
          BB != E; ++BB) {
-      // Add bundle operands to any top-level call sites.
-      SmallVector<OperandBundleDef, 1> OpBundles;
-      for (Instruction &II : llvm::make_early_inc_range(*BB)) {
-        CallBase *I = dyn_cast<CallBase>(&II);
-        if (!I)
-          continue;
-
-        // Skip call sites which are nounwind intrinsics.
-        auto *CalledFn =
-            dyn_cast<Function>(I->getCalledOperand()->stripPointerCasts());
-        if (CalledFn && CalledFn->isIntrinsic() && I->doesNotThrow())
-          continue;
-
-        // Skip call sites which already have a "funclet" bundle.
-        if (I->getOperandBundle(LLVMContext::OB_funclet))
-          continue;
-
-        I->getOperandBundlesAsDefs(OpBundles);
-        OpBundles.emplace_back("funclet", CallSiteEHPad);
-
-        Instruction *NewInst = CallBase::Create(I, OpBundles, I);
-        NewInst->takeName(I);
-        I->replaceAllUsesWith(NewInst);
-        I->eraseFromParent();
-
-        OpBundles.clear();
-      }
+      // Add bundle operands to inlined call sites.
+      PropagateOperandBundles(BB, CallSiteEHPad);
 
       // It is problematic if the inlinee has a cleanupret which unwinds to
       // caller and we inline it into a call site which doesn't unwind but into
Index: llvm/lib/IR/IntrinsicInst.cpp
===================================================================
--- llvm/lib/IR/IntrinsicInst.cpp
+++ llvm/lib/IR/IntrinsicInst.cpp
@@ -32,6 +32,39 @@
 
 using namespace llvm;
 
+bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
+  switch (IID) {
+  case Intrinsic::objc_autorelease:
+  case Intrinsic::objc_autoreleasePoolPop:
+  case Intrinsic::objc_autoreleasePoolPush:
+  case Intrinsic::objc_autoreleaseReturnValue:
+  case Intrinsic::objc_copyWeak:
+  case Intrinsic::objc_destroyWeak:
+  case Intrinsic::objc_initWeak:
+  case Intrinsic::objc_loadWeak:
+  case Intrinsic::objc_loadWeakRetained:
+  case Intrinsic::objc_moveWeak:
+  case Intrinsic::objc_release:
+  case Intrinsic::objc_retain:
+  case Intrinsic::objc_retainAutorelease:
+  case Intrinsic::objc_retainAutoreleaseReturnValue:
+  case Intrinsic::objc_retainAutoreleasedReturnValue:
+  case Intrinsic::objc_retainBlock:
+  case Intrinsic::objc_storeStrong:
+  case Intrinsic::objc_storeWeak:
+  case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
+  case Intrinsic::objc_retainedObject:
+  case Intrinsic::objc_unretainedObject:
+  case Intrinsic::objc_unretainedPointer:
+  case Intrinsic::objc_retain_autorelease:
+  case Intrinsic::objc_sync_enter:
+  case Intrinsic::objc_sync_exit:
+    return true;
+  default:
+    return false;
+  }
+}
+
 //===----------------------------------------------------------------------===//
 /// DbgVariableIntrinsic - This is the common base class for debug info
 /// intrinsics for variables.
Index: llvm/lib/CodeGen/PreISelIntrinsicLowering.cpp
===================================================================
--- llvm/lib/CodeGen/PreISelIntrinsicLowering.cpp
+++ llvm/lib/CodeGen/PreISelIntrinsicLowering.cpp
@@ -18,6 +18,7 @@
 #include "llvm/IR/Function.h"
 #include "llvm/IR/IRBuilder.h"
 #include "llvm/IR/Instructions.h"
+#include "llvm/IR/IntrinsicInst.h"
 #include "llvm/IR/Module.h"
 #include "llvm/IR/Type.h"
 #include "llvm/InitializePasses.h"
@@ -69,6 +70,8 @@
 
 static bool lowerObjCCall(Function &F, const char *NewFn,
                           bool setNonLazyBind = false) {
+  assert(IntrinsicInst::mayLowerToFunctionCall(F.getIntrinsicID()) &&
+         "Pre-ISel intrinsics do lower into regular function calls");
   if (F.use_empty())
     return false;
 
@@ -107,7 +110,9 @@
 
     IRBuilder<> Builder(CI->getParent(), CI->getIterator());
     SmallVector<Value *, 8> Args(CI->args());
-    CallInst *NewCI = Builder.CreateCall(FCache, Args);
+    SmallVector<llvm::OperandBundleDef, 1> BundleList;
+    CI->getOperandBundlesAsDefs(BundleList);
+    CallInst *NewCI = Builder.CreateCall(FCache, Args, BundleList);
     NewCI->setName(CI->getName());
 
     // Try to set the most appropriate TailCallKind based on both the current
Index: llvm/include/llvm/IR/IntrinsicInst.h
===================================================================
--- llvm/include/llvm/IR/IntrinsicInst.h
+++ llvm/include/llvm/IR/IntrinsicInst.h
@@ -84,7 +84,7 @@
     }
   }
 
-  // Checks if the intrinsic is an annotation.
+  /// Checks if the intrinsic is an annotation.
   bool isAssumeLikeIntrinsic() const {
     switch (getIntrinsicID()) {
     default: break;
@@ -107,7 +107,11 @@
     return false;
   }
 
-  // Methods for support type inquiry through isa, cast, and dyn_cast:
+  /// Check if the intrinsic might lower into a regular function call in the
+  /// course of IR transformations.
+  static bool mayLowerToFunctionCall(Intrinsic::ID IID);
+
+  /// Methods for support type inquiry through isa, cast, and dyn_cast:
   static bool classof(const CallInst *I) {
     if (const Function *CF = I->getCalledFunction())
       return CF->isIntrinsic();
Index: clang/test/CodeGenObjCXX/arc-exceptions-seh.mm
===================================================================
--- /dev/null
+++ clang/test/CodeGenObjCXX/arc-exceptions-seh.mm
@@ -0,0 +1,29 @@
+// RUN: %clang_cc1 -triple x86_64-pc-windows-msvc -emit-llvm -fobjc-arc -fexceptions -fobjc-exceptions -fobjc-runtime=gnustep-2.0 -o - %s | FileCheck %s
+
+// WinEH requires funclet tokens on nounwind intrinsics if they can lower to
+// regular function calls in the course of IR transformations.
+//
+// This is the case for ObjC ARC runtime intrinsics. Test that clang emits the
+// funclet tokens for llvm.objc.retain and llvm.objc.storeStrong and that they
+// refer to their catchpad's SSA value.
+
+@class Ety;
+void opaque(void);
+void test_catch_with_objc_intrinsic(void) {
+  @try {
+    opaque();
+  } @catch (Ety *ex) {
+    // Destroy ex when leaving catchpad. This emits calls to intrinsic functions
+    // llvm.objc.retain and llvm.objc.storeStrong
+  }
+}
+
+// CHECK-LABEL: define{{.*}} void {{.*}}test_catch_with_objc_intrinsic
+//                ...
+// CHECK:       catch.dispatch:
+// CHECK-NEXT:    [[CATCHSWITCH:%[0-9]+]] = catchswitch within none
+//                ...
+// CHECK:       catch:
+// CHECK-NEXT:    [[CATCHPAD:%[0-9]+]] = catchpad within [[CATCHSWITCH]]
+// CHECK:         {{%[0-9]+}} = call {{.*}} @llvm.objc.retain{{.*}} [ "funclet"(token [[CATCHPAD]]) ]
+// CHECK:         call {{.*}} @llvm.objc.storeStrong{{.*}} [ "funclet"(token [[CATCHPAD]]) ]
Index: clang/lib/CodeGen/CGCall.cpp
===================================================================
--- clang/lib/CodeGen/CGCall.cpp
+++ clang/lib/CodeGen/CGCall.cpp
@@ -4473,17 +4473,22 @@
 // they are nested within.
 SmallVector<llvm::OperandBundleDef, 1>
 CodeGenFunction::getBundlesForFunclet(llvm::Value *Callee) {
-  SmallVector<llvm::OperandBundleDef, 1> BundleList;
   // There is no need for a funclet operand bundle if we aren't inside a
   // funclet.
   if (!CurrentFuncletPad)
-    return BundleList;
-
-  // Skip intrinsics which cannot throw.
-  auto *CalleeFn = dyn_cast<llvm::Function>(Callee->stripPointerCasts());
-  if (CalleeFn && CalleeFn->isIntrinsic() && CalleeFn->doesNotThrow())
-    return BundleList;
+    return SmallVector<llvm::OperandBundleDef, 1>();
+
+  // Skip intrinsics which cannot throw (as long as they don't lower into
+  // regular function calls in the course of IR transformations).
+  if (auto *CalleeFn = dyn_cast<llvm::Function>(Callee->stripPointerCasts())) {
+    if (CalleeFn->isIntrinsic() && CalleeFn->doesNotThrow()) {
+      auto IID = CalleeFn->getIntrinsicID();
+      if (!llvm::IntrinsicInst::mayLowerToFunctionCall(IID))
+        return SmallVector<llvm::OperandBundleDef, 1>();
+    }
+  }
 
+  SmallVector<llvm::OperandBundleDef, 1> BundleList;
   BundleList.emplace_back("funclet", CurrentFuncletPad);
   return BundleList;
 }
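
For reference, a minimal sketch (not part of the patch) of how a transform could use the new IntrinsicInst::mayLowerToFunctionCall helper to decide whether a call placed inside an EH funclet still needs a "funclet" operand bundle. The helper name needsFuncletToken is made up for illustration; its checks simply mirror the logic added to CGCall.cpp and InlineFunction.cpp above.

  #include "llvm/IR/Function.h"
  #include "llvm/IR/InstrTypes.h"
  #include "llvm/IR/IntrinsicInst.h"
  #include "llvm/IR/LLVMContext.h"

  using namespace llvm;

  // Illustration only: returns true if CB must carry a "funclet" operand
  // bundle when it lives inside an EH funclet (WinEH).
  static bool needsFuncletToken(const CallBase &CB) {
    // Calls that already carry a funclet bundle are left alone.
    if (CB.getOperandBundle(LLVMContext::OB_funclet))
      return false;
    // Nounwind intrinsics may be skipped only if they are guaranteed to stay
    // intrinsics; ObjC ARC intrinsics can become plain calls before ISel.
    auto *Callee =
        dyn_cast<Function>(CB.getCalledOperand()->stripPointerCasts());
    if (Callee && Callee->isIntrinsic() && CB.doesNotThrow() &&
        !IntrinsicInst::mayLowerToFunctionCall(Callee->getIntrinsicID()))
      return false;
    return true;
  }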