llvmbot wrote:
<!--LLVM PR SUMMARY COMMENT--> @llvm/pr-subscribers-clang Author: Andy Kaylor (andykaylor) <details> <summary>Changes</summary> This adds CIR support for handling full-expression cleanups in conditional branches. Because CIR uses structured control flow, it was necessary to handle these cleanups differently from how they are handled in classic codegen. CIR speculatively creates a cleanup scope when an ExprWithCleanups contains a conditional operator and maintains a dedicated stack of these deferred cleanups, which is added to the cleanup scope at the end of the full expression, with an active flag used to control whether each cleanup should be executed based on which branches were actually taken during the conditional expression evaluation. This is similar to the mechanism used for lifetime-extended cleanups, but the timing of when the cleanups are moved to the main EH stack is different, so we need to maintain two different pending cleanup stacks. We are able to use the same PendingCleanupEntry class for both. 
Assisted-by: Cursor / claude-4.6-opus-high --- Patch is 61.65 KiB, truncated to 20.00 KiB below, full version: https://github.com/llvm/llvm-project/pull/191479.diff 9 Files Affected: - (modified) clang/lib/CIR/CodeGen/CIRGenCleanup.cpp (+155-2) - (modified) clang/lib/CIR/CodeGen/CIRGenDecl.cpp (+12-2) - (modified) clang/lib/CIR/CodeGen/CIRGenExpr.cpp (+9-1) - (modified) clang/lib/CIR/CodeGen/CIRGenExprAggregate.cpp (+3) - (modified) clang/lib/CIR/CodeGen/CIRGenExprScalar.cpp (+26-3) - (modified) clang/lib/CIR/CodeGen/CIRGenFunction.h (+54-16) - (modified) clang/lib/CIR/CodeGen/CIRGenStmt.cpp (+2) - (added) clang/test/CIR/CodeGen/cleanup-conditional-eh.cpp (+494) - (added) clang/test/CIR/CodeGen/cleanup-conditional.cpp (+419) ``````````diff diff --git a/clang/lib/CIR/CodeGen/CIRGenCleanup.cpp b/clang/lib/CIR/CodeGen/CIRGenCleanup.cpp index d8d440a60110e..576b957612289 100644 --- a/clang/lib/CIR/CodeGen/CIRGenCleanup.cpp +++ b/clang/lib/CIR/CodeGen/CIRGenCleanup.cpp @@ -19,11 +19,37 @@ #include "CIRGenCleanup.h" #include "CIRGenFunction.h" +#include "clang/AST/RecursiveASTVisitor.h" #include "clang/CIR/MissingFeatures.h" using namespace clang; using namespace clang::CIRGen; +namespace { +/// Return true if the expression tree contains an AbstractConditionalOperator +/// (ternary ?:), which is the only construct whose CIR codegen calls +/// ConditionalEvaluation::beginEvaluation() and thus causes cleanups to be +/// deferred via pushFullExprCleanup. Logical &&/|| do NOT call +/// beginEvaluation(); their branch-local cleanups are handled by LexicalScope. +class ConditionalEvaluationFinder + : public RecursiveASTVisitor<ConditionalEvaluationFinder> { + bool foundConditional = false; + +public: + bool found() const { return foundConditional; } + + bool VisitAbstractConditionalOperator(AbstractConditionalOperator *) { + foundConditional = true; + return false; + } + + // Don't cross evaluation-context boundaries. 
+ bool TraverseLambdaExpr(LambdaExpr *) { return true; } + bool TraverseBlockExpr(BlockExpr *) { return true; } + bool TraverseStmtExpr(StmtExpr *) { return true; } +}; +} // namespace + //===----------------------------------------------------------------------===// // CIRGenFunction cleanup related //===----------------------------------------------------------------------===// @@ -34,6 +60,133 @@ void CIRGenFunction::emitCXXTemporary(const CXXTemporary *temporary, pushDestroy(NormalAndEHCleanup, ptr, tempType, destroyCXXObject); } +Address CIRGenFunction::createCleanupActiveFlag() { + assert(isInConditionalBranch()); + mlir::Location loc = builder.getUnknownLoc(); + + // Place the alloca in the function entry block so it dominates everything, + // including both regions of any enclosing cir.cleanup.scope. We can't rely + // on the default curLexScope path because we may be inside a ternary branch + // whose LexicalScope would capture the alloca. + Address active = createTempAllocaWithoutCast( + builder.getBoolTy(), CharUnits::One(), loc, "cleanup.cond", + /*arraySize=*/nullptr, + builder.getBestAllocaInsertPoint(getCurFunctionEntryBlock())); + + // Initialize to false before the outermost conditional. + { + mlir::OpBuilder::InsertionGuard guard(builder); + builder.restoreInsertionPoint(outermostConditional->getInsertPoint()); + builder.createFlagStore(loc, false, active.getPointer()); + } + + // Set to true at the current location (inside the conditional branch). + builder.createFlagStore(loc, true, active.getPointer()); + + return active; +} + +void CIRGenFunction::enterFullExprCleanupScope(const Expr *subExpr) { + inFullExprCleanupScope = true; + // Eagerly create the CleanupScopeOp only when the expression contains a + // construct that will enter a ConditionalEvaluation. This ensures the scope + // structurally wraps the entire expression. 
For expressions without + // conditionals we skip it entirely so that values defined inside nested + // cleanup scopes remain directly accessible. + ConditionalEvaluationFinder finder; + finder.TraverseStmt(const_cast<Expr *>(subExpr)); + if (finder.found()) + createFullExprCleanupScope(); +} + +void CIRGenFunction::createFullExprCleanupScope() { + assert(inFullExprCleanupScope && "not in a full-expression cleanup scope"); + assert(!fullExprCleanupScope && "scope already created"); + + mlir::Location loc = builder.getUnknownLoc(); + cir::CleanupKind cleanupKind = getLangOpts().Exceptions + ? cir::CleanupKind::All + : cir::CleanupKind::Normal; + fullExprCleanupScope = cir::CleanupScopeOp::create( + builder, loc, cleanupKind, + /*bodyBuilder=*/ + [&](mlir::OpBuilder &b, mlir::Location loc) {}, + /*cleanupBuilder=*/ + [&](mlir::OpBuilder &b, mlir::Location loc) {}); + + mlir::Block &bodyBlock = fullExprCleanupScope.getBodyRegion().front(); + builder.setInsertionPointToEnd(&bodyBlock); +} + +void CIRGenFunction::exitFullExprCleanupScope() { + inFullExprCleanupScope = false; + + if (!fullExprCleanupScope) + return; + + cir::CleanupScopeOp scope = fullExprCleanupScope; + fullExprCleanupScope = nullptr; + if (!deferredConditionalCleanupStack.empty()) { + // Terminate the body region. + mlir::Block &bodyBlock = scope.getBodyRegion().front(); + { + mlir::OpBuilder::InsertionGuard guard(builder); + builder.setInsertionPointToEnd(&bodyBlock); + if (bodyBlock.empty() || + !bodyBlock.back().hasTrait<mlir::OpTrait::IsTerminator>()) + builder.createYield(scope.getLoc()); + } + + // Emit each deferred cleanup directly into the pre-created scope's + // cleanup region rather than going through the EH stack (which would + // create a second CleanupScopeOp). 
+ { + mlir::OpBuilder::InsertionGuard guard(builder); + mlir::Block &cleanupBlock = scope.getCleanupRegion().front(); + builder.setInsertionPointToEnd(&cleanupBlock); + + for (const PendingCleanupEntry &entry : + llvm::reverse(deferredConditionalCleanupStack)) { + if (entry.activeFlag.isValid()) { + mlir::Value flag = + builder.createLoad(scope.getLoc(), entry.activeFlag); + cir::IfOp::create( + builder, scope.getLoc(), flag, /*withElseRegion=*/false, + [&](mlir::OpBuilder &b, mlir::Location loc) { + emitDestroy(entry.addr, entry.type, entry.destroyer); + builder.createYield(loc); + }); + } else { + emitDestroy(entry.addr, entry.type, entry.destroyer); + } + } + builder.createYield(scope.getLoc()); + } + deferredConditionalCleanupStack.clear(); + + // Move the builder to after the scope in the parent block so that + // subsequent code (e.g. value reloads) lands outside the scope. + builder.setInsertionPointAfter(scope); + return; + } + + // The scope was created (because the AST contained a conditional) but no + // conditional cleanups were actually deferred. Inline the body back into + // the parent block and erase the empty scope. + mlir::Block *parentBlock = scope->getBlock(); + mlir::Block &bodyBlock = scope.getBodyRegion().front(); + + if (!bodyBlock.empty() && + bodyBlock.back().hasTrait<mlir::OpTrait::IsTerminator>()) + bodyBlock.back().erase(); + + mlir::Block::iterator afterScope = std::next(scope->getIterator()); + parentBlock->getOperations().splice(scope->getIterator(), + bodyBlock.getOperations()); + scope->erase(); + builder.setInsertionPoint(parentBlock, afterScope); +} + //===----------------------------------------------------------------------===// // EHScopeStack //===----------------------------------------------------------------------===// @@ -425,9 +578,9 @@ void CIRGenFunction::popCleanupBlocks( popCleanupBlocks(oldCleanupStackDepth, valuesToReload); // Promote deferred lifetime-extended cleanups onto the EH scope stack. 
- for (const LifetimeExtendedCleanupEntry &cleanup : llvm::make_range( + for (const PendingCleanupEntry &cleanup : llvm::make_range( lifetimeExtendedCleanupStack.begin() + oldLifetimeExtendedSize, lifetimeExtendedCleanupStack.end())) - pushLifetimeExtendedCleanupToEHStack(cleanup); + pushPendingCleanupToEHStack(cleanup); lifetimeExtendedCleanupStack.truncate(oldLifetimeExtendedSize); } diff --git a/clang/lib/CIR/CodeGen/CIRGenDecl.cpp b/clang/lib/CIR/CodeGen/CIRGenDecl.cpp index b96b822609c10..38edc6d6bcb84 100644 --- a/clang/lib/CIR/CodeGen/CIRGenDecl.cpp +++ b/clang/lib/CIR/CodeGen/CIRGenDecl.cpp @@ -10,6 +10,7 @@ // //===----------------------------------------------------------------------===// +#include "CIRGenCleanup.h" #include "CIRGenConstantEmitter.h" #include "CIRGenFunction.h" #include "mlir/IR/Location.h" @@ -1032,10 +1033,19 @@ void CIRGenFunction::pushLifetimeExtendedDestroy(CleanupKind cleanupKind, pushCleanupAfterFullExpr(cleanupKind, addr, type, destroyer); } -void CIRGenFunction::pushLifetimeExtendedCleanupToEHStack( - const LifetimeExtendedCleanupEntry &entry) { +void CIRGenFunction::pushPendingCleanupToEHStack( + const PendingCleanupEntry &entry) { ehStack.pushCleanup<DestroyObject>(entry.kind, entry.addr, entry.type, entry.destroyer); + + if (entry.activeFlag.isValid()) { + EHCleanupScope &scope = cast<EHCleanupScope>(*ehStack.begin()); + scope.setActiveFlag(entry.activeFlag); + if (scope.isNormalCleanup()) + scope.setTestFlagInNormalCleanup(); + if (scope.isEHCleanup()) + scope.setTestFlagInEHCleanup(); + } } /// Destroys all the elements of the given array, beginning from last to first. 
diff --git a/clang/lib/CIR/CodeGen/CIRGenExpr.cpp b/clang/lib/CIR/CodeGen/CIRGenExpr.cpp index a306cc68dff8e..646255faeb347 100644 --- a/clang/lib/CIR/CodeGen/CIRGenExpr.cpp +++ b/clang/lib/CIR/CodeGen/CIRGenExpr.cpp @@ -1710,8 +1710,16 @@ static Address createReferenceTemporary(CIRGenFunction &cgf, extDeclAlloca = extDeclAddrIter->second.getDefiningOp<cir::AllocaOp>(); } mlir::OpBuilder::InsertPoint ip; - if (extDeclAlloca) + if (extDeclAlloca) { ip = {extDeclAlloca->getBlock(), extDeclAlloca->getIterator()}; + } else if (cgf.isInConditionalBranch() && + m->getStorageDuration() == SD_FullExpression) { + // Place in the function entry block so the alloca dominates both + // regions of any enclosing cir.cleanup.scope. The default path + // would use curLexScope which may be a ternary branch. + ip = cgf.getBuilder().getBestAllocaInsertPoint( + cgf.getCurFunctionEntryBlock()); + } return cgf.createMemTemp(ty, cgf.getLoc(m->getSourceRange()), cgf.getCounterRefTmpAsString(), /*alloca=*/nullptr, ip); diff --git a/clang/lib/CIR/CodeGen/CIRGenExprAggregate.cpp b/clang/lib/CIR/CodeGen/CIRGenExprAggregate.cpp index ffce8a6bf86a7..2a21b274103cb 100644 --- a/clang/lib/CIR/CodeGen/CIRGenExprAggregate.cpp +++ b/clang/lib/CIR/CodeGen/CIRGenExprAggregate.cpp @@ -980,7 +980,10 @@ void AggExprEmitter::VisitExprWithCleanups(ExprWithCleanups *e) { builder.restoreInsertionPoint(scopeBegin); CIRGenFunction::LexicalScope lexScope{cgf, scopeLoc, builder.getInsertionBlock()}; + + cgf.enterFullExprCleanupScope(e->getSubExpr()); Visit(e->getSubExpr()); + cgf.exitFullExprCleanupScope(); } } diff --git a/clang/lib/CIR/CodeGen/CIRGenExprScalar.cpp b/clang/lib/CIR/CodeGen/CIRGenExprScalar.cpp index b1498f376725d..47ef4f814b531 100644 --- a/clang/lib/CIR/CodeGen/CIRGenExprScalar.cpp +++ b/clang/lib/CIR/CodeGen/CIRGenExprScalar.cpp @@ -1597,10 +1597,33 @@ mlir::Value ScalarExprEmitter::emitCompoundAssign( mlir::Value ScalarExprEmitter::VisitExprWithCleanups(ExprWithCleanups *e) { 
CIRGenFunction::RunCleanupsScope cleanups(cgf); + + cgf.enterFullExprCleanupScope(e->getSubExpr()); mlir::Value v = Visit(e->getSubExpr()); - // Defend against dominance problems caused by jumps out of expression - // evaluation through the shared cleanup block. - cleanups.forceCleanup({&v}); + + bool hasDeferredCleanups = !cgf.deferredConditionalCleanupStack.empty(); + + // When deferred conditional cleanups exist, the expression result lives + // inside the cleanup scope body (an MLIR region). Spill it to a temporary + // (allocated in the function entry block) while the builder is still inside + // the body; we reload it after exitFullExprCleanupScope moves the builder + // outside the scope. + Address spill = Address::invalid(); + if (v && hasDeferredCleanups) { + spill = cgf.createDefaultAlignTempAlloca(v.getType(), v.getLoc(), + "tmp.exprcleanup"); + cgf.getBuilder().createStore(v.getLoc(), v, spill); + } + + cgf.exitFullExprCleanupScope(); + + if (hasDeferredCleanups) { + cleanups.forceCleanup({}); + if (spill.isValid()) + v = cgf.getBuilder().createLoad(v.getLoc(), spill); + } else { + cleanups.forceCleanup({&v}); + } return v; } diff --git a/clang/lib/CIR/CodeGen/CIRGenFunction.h b/clang/lib/CIR/CodeGen/CIRGenFunction.h index 88c7996eab569..597626ddc76a8 100644 --- a/clang/lib/CIR/CodeGen/CIRGenFunction.h +++ b/clang/lib/CIR/CodeGen/CIRGenFunction.h @@ -94,22 +94,36 @@ class CIRGenFunction : public CIRGenTypeCache { typedef void Destroyer(CIRGenFunction &cgf, Address addr, QualType ty); - /// An entry in the lifetime-extended cleanup stack. Each entry represents a - /// cleanup that was deferred past a full-expression boundary (e.g., - /// destroying a temporary bound to a local reference). When the enclosing - /// scope exits, these entries are promoted to the EH scope stack. + /// A cleanup entry that will be promoted onto the EH scope stack at a later + /// point. 
Used by both the lifetime-extended cleanup stack (promoted when + /// the enclosing scope exits) and the deferred conditional cleanup stack + /// (promoted at the enclosing full-expression level). /// - /// Currently only DestroyObject cleanups are lifetime-extended. When other - /// cleanup types are needed (e.g., CallLifetimeEnd), this struct can be - /// extended with a std::variant of cleanup data types. - struct LifetimeExtendedCleanupEntry { + /// Currently only DestroyObject cleanups use this. When other cleanup types + /// are needed (e.g., CallLifetimeEnd), this struct can be extended with a + /// std::variant of cleanup data types. + struct PendingCleanupEntry { CleanupKind kind; Address addr; QualType type; Destroyer *destroyer; + Address activeFlag = Address::invalid(); }; - llvm::SmallVector<LifetimeExtendedCleanupEntry> lifetimeExtendedCleanupStack; + llvm::SmallVector<PendingCleanupEntry> lifetimeExtendedCleanupStack; + + /// Deferred cleanup entries from conditional branches within the current + /// full-expression. Emitted into the cleanup region of fullExprCleanupScope + /// by exitFullExprCleanupScope. + llvm::SmallVector<PendingCleanupEntry> deferredConditionalCleanupStack; + + /// True while between enterFullExprCleanupScope/exitFullExprCleanupScope. + bool inFullExprCleanupScope = false; + + /// A lazily-created CleanupScopeOp for the current full-expression. Created + /// by createFullExprCleanupScope() when the first conditional cleanup is + /// deferred, so expressions without conditional cleanups pay no cost. + cir::CleanupScopeOp fullExprCleanupScope = nullptr; GlobalDecl curSEHParent; @@ -1009,17 +1023,45 @@ class CIRGenFunction : public CIRGenTypeCache { void deactivateCleanupBlock(EHScopeStack::stable_iterator cleanup, mlir::Operation *dominatingIP); + /// Create an active flag variable for use with conditional cleanups. 
The + /// flag is initialized to false before the outermost conditional and set to + /// true at the current insertion point (inside the conditional branch). + Address createCleanupActiveFlag(); + + /// Enter a full-expression cleanup scope. If \p subExpr contains a + /// conditional (ternary, &&, ||), eagerly creates a CleanupScopeOp that + /// will wrap the entire expression. Otherwise defers scope creation. + void enterFullExprCleanupScope(const Expr *subExpr); + + /// Create the CleanupScopeOp for the current full-expression. + /// Called from enterFullExprCleanupScope when a conditional is detected. + void createFullExprCleanupScope(); + + /// Finalize the full-expression cleanup scope after the sub-expression. + void exitFullExprCleanupScope(); + + /// Promote a single pending cleanup entry onto the EH scope stack. If the + /// entry has a valid activeFlag, the cleanup is configured as conditional. + /// Defined in CIRGenDecl.cpp where the concrete cleanup types are visible. + void pushPendingCleanupToEHStack(const PendingCleanupEntry &entry); + /// Push a cleanup to be run at the end of the current full-expression. Safe /// against the possibility that we're currently inside a /// conditionally-evaluated expression. template <class T, class... As> void pushFullExprCleanup(CleanupKind kind, As... a) { - // If we're not in a conditional branch, or if none of the - // arguments requires saving, then use the unconditional cleanup. if (!isInConditionalBranch()) return ehStack.pushCleanup<T>(kind, a...); - cgm.errorNYI("pushFullExprCleanup in conditional branch"); + // Defer the cleanup until exitFullExprCleanupScope. We can't push to + // the EH stack now because the ternary's inner LexicalScope would pop + // it prematurely. The scope must have been eagerly created by + // enterFullExprCleanupScope (which detected the conditional in the AST). 
+ assert(fullExprCleanupScope && + "conditional cleanup pushed but no full-expression scope created"); + Address activeFlag = createCleanupActiveFlag(); + deferredConditionalCleanupStack.push_back( + PendingCleanupEntry{kind, a..., activeFlag}); } /// Queue a cleanup to be pushed after finishing the current full-expression. @@ -1271,10 +1313,6 @@ class CIRGenFunction : public CIRGenTypeCache { QualType type, Destroyer *destroyer, bool useEHCleanupForArray); - /// Promote a single lifetime-extended cleanup entry onto the EH scope stack. - /// Defined in CIRGenDecl.cpp where the concrete cleanup types are visible. - void pushLifetimeExtendedCleanupToEHStack( - const LifetimeExtendedCleanupEntry &entry); Destroyer *getDestroyer(clang::QualType::DestructionKind kind); diff --git a/clang/lib/CIR/CodeGen/CIRGenStmt.cpp b/clang/lib/CIR/CodeGen/CIRGenStmt.cpp index 07d1d62053ea6..93059d769816f 100644 --- a/clang/lib/CIR/CodeGen/CIRGenStmt.cpp +++ b/clang/lib/CIR/CodeGen/CIRGenStmt.cpp @@ -667,7 +667,9 @@ mlir::LogicalResult CIRGenFunction::emitReturnStmt(const ReturnStmt &s) { builder.restoreInsertionPoint(scopeBody); CIRGenFunction::LexicalScope lexScope{*this, scopeLoc, builder.getInsertionBlock()}; + enterFullExprCleanupScope(rv); handleReturnVal(); + exitFullExprCleanupScope(); } } diff --git a/clang/test/CIR/CodeGen/cleanup-conditional-eh.cpp b/clang/test/CIR/CodeGen/cleanup-conditional-eh.cpp new file mode 100644 index 0000000000000..a6fd88ddaa7cd --- /dev/null +++ b/clang/test/CIR/CodeGen/cleanup-conditional-eh.cpp @@ -0,0 +1,494 @@ +// Exceptions-enabled variant of cleanup-conditional.cpp. +// When -fcxx-exceptions is active, cleanup scopes use "cleanup all" (both +// normal and exception paths) and the LLVM lowering emits invoke/landingpad +// instead of plain calls for operations that can throw. 
+// +// RUN: %clang_cc1 -triple x86_64-unknown-linux-gnu -fclangir -emit-cir -fcxx-exceptions -fexceptions %s -o %t.cir +// RUN: FileCheck --input-file=%t.cir %s --check-prefix=CIR +// RUN: %clang_cc1 -triple x86_64-unknown-linux-gnu -fclangir -emit-llvm -fcxx-exceptions -fexceptions %s -o %t-cir.ll +// RUN: FileCheck --input-file=%t-cir.ll %s --check-prefix=LLVM +// RUN: %clang_cc1 -triple x86_64-unknown-linux-gnu -emit-llvm -fcxx-exceptions -fexceptions %s -o %t.ll +// RUN: FileCheck --input-file=%t.ll %s --check-prefix=OGCG + +struct S { + S(); + ~S(); + int get(); +}; + +struct A { + A(); + ~A(); + int get(); +}; + +struct B { + B(); + ~B(); + int get(); +}; + +void test_ternary_temporary(bool c, int x) { + int result = c ? S().get() : x; +} +// CIR-LABEL: @_Z22test_ternary_temporarybi +// CIR: %[[TMP:.*]] = cir.alloca !rec_S, !cir.ptr<!rec_S>, ["ref.tmp0"] +// CIR: %[[ACTIVE:.*]] = cir.alloca !cir.bool, !cir.ptr<!cir.bool>, ["cleanup.cond"] +// CIR: cir.cleanup.scope { +// CIR: %[[COND:.*]] = cir.load {{.*}} : !cir.ptr<!cir.bool>, !cir.bool +// CIR: %[[FALSE:.*]] = cir.const #false +// CIR: cir.store %[[FALSE]], %[[ACTIVE]] : !cir.bool, !cir.ptr<!cir.bool> +//... [truncated] `````````` </details> https://github.com/llvm/llvm-project/pull/191479 _______________________________________________ cfe-commits mailing list [email protected] https://lists.llvm.org/cgi-bin/mailman/listinfo/cfe-commits
