diff --git a/clang/lib/CIR/CodeGen/CIRGenCleanup.cpp b/clang/lib/CIR/CodeGen/CIRGenCleanup.cpp
index 242aee079f22..84f479a3c832 100644
--- a/clang/lib/CIR/CodeGen/CIRGenCleanup.cpp
+++ b/clang/lib/CIR/CodeGen/CIRGenCleanup.cpp
@@ -643,12 +643,17 @@ void CIRGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
 
     // We only actually emit the cleanup code if the cleanup is either
     // active or was used before it was deactivated.
    if (EHActiveFlag.isValid() || IsActive) {
-      cleanupFlags.setIsForEHCleanup();
-      mlir::OpBuilder::InsertionGuard guard(builder);
-      auto yield = cast<cir::YieldOp>(ehEntry->getTerminator());
-      builder.setInsertionPoint(yield);
-      emitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
+
+      // We skip the cleanups at the end of CIR scopes as they will be handled
+      // later. This prevents cases like multiple destructor calls for the same
+      // object.
+      if (!isa<cir::ScopeOp>(yield->getParentOp())) {
+        cleanupFlags.setIsForEHCleanup();
+        mlir::OpBuilder::InsertionGuard guard(builder);
+        builder.setInsertionPoint(yield);
+        emitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
+      }
    }
 
    if (CPI)
diff --git a/clang/lib/CIR/CodeGen/CIRGenException.cpp b/clang/lib/CIR/CodeGen/CIRGenException.cpp
index b438f9aae0f5..881f00211a69 100644
--- a/clang/lib/CIR/CodeGen/CIRGenException.cpp
+++ b/clang/lib/CIR/CodeGen/CIRGenException.cpp
@@ -418,7 +418,7 @@ static void emitCatchDispatchBlock(CIRGenFunction &CGF,
  // that catch-all as the dispatch block.
  if (catchScope.getNumHandlers() == 1 &&
      catchScope.getHandler(0).isCatchAll()) {
-    // assert(dispatchBlock == catchScope.getHandler(0).Block);
+    assert(dispatchBlock == catchScope.getHandler(0).Block);
    return;
  }
 
@@ -786,13 +786,21 @@ CIRGenFunction::getEHDispatchBlock(EHScopeStack::stable_iterator si,
  case EHScope::Catch: {
    // LLVM does some optimization with branches here, CIR just keep track of
    // the corresponding calls.
-    assert(callWithExceptionCtx && "expected call information");
-    {
-      mlir::OpBuilder::InsertionGuard guard(getBuilder());
-      assert(callWithExceptionCtx.getCleanup().empty() &&
-             "one per call: expected empty region at this point");
-      dispatchBlock = builder.createBlock(&callWithExceptionCtx.getCleanup());
-      builder.createYield(callWithExceptionCtx.getLoc());
+    EHCatchScope &catchScope = cast<EHCatchScope>(scope);
+    if (catchScope.getNumHandlers() == 1 &&
+        catchScope.getHandler(0).isCatchAll()) {
+      dispatchBlock = catchScope.getHandler(0).Block;
+      assert(dispatchBlock);
+    } else {
+      assert(callWithExceptionCtx && "expected call information");
+      {
+        mlir::OpBuilder::InsertionGuard guard(getBuilder());
+        assert(callWithExceptionCtx.getCleanup().empty() &&
+               "one per call: expected empty region at this point");
+        dispatchBlock =
+            builder.createBlock(&callWithExceptionCtx.getCleanup());
+        builder.createYield(callWithExceptionCtx.getLoc());
+      }
    }
    break;
  }
diff --git a/clang/test/CIR/CodeGen/conditional-cleanup.cpp b/clang/test/CIR/CodeGen/conditional-cleanup.cpp
index 2397bce547e2..b61b6c2e555c 100644
--- a/clang/test/CIR/CodeGen/conditional-cleanup.cpp
+++ b/clang/test/CIR/CodeGen/conditional-cleanup.cpp
@@ -218,10 +218,6 @@ namespace test7 {
 // CIR_EH: cir.if %[[VAL_41]] {
 // CIR_EH: cir.call @_ZN5test71AD1Ev(%[[VAL_2]]) : (!cir.ptr) -> ()
 // CIR_EH: }
-// CIR_EH: %[[VAL_42:.*]] = cir.load{{.*}} %[[VAL_3]] : !cir.ptr, !cir.bool
-// CIR_EH: cir.if %[[VAL_42]] {
-// CIR_EH: cir.call @_ZN5test71AD1Ev(%[[VAL_2]]) : (!cir.ptr) -> ()
-// CIR_EH: }
 // CIR_EH: %[[VAL_43:.*]] = cir.load{{.*}} %[[VAL_1]] : !cir.ptr, !cir.bool
 // CIR_EH: cir.if %[[VAL_43]] {
 // CIR_EH: cir.call @_ZdlPvm(%[[VAL_16]], %[[VAL_15]]) : (!cir.ptr, !u64i) -> ()
diff --git a/clang/test/CIR/CodeGen/try-catch-dtors.cpp b/clang/test/CIR/CodeGen/try-catch-dtors.cpp
index ef333bf7c2c8..468e534a6118 100644
--- a/clang/test/CIR/CodeGen/try-catch-dtors.cpp
+++ b/clang/test/CIR/CodeGen/try-catch-dtors.cpp
@@ -351,24 +351,104 @@ void d() {
 }
 
 // CIR: %[[V0:.*]] = cir.alloca !rec_C, !cir.ptr, ["a"] {alignment = 1 : i64}
-// CIR: %[[V1:.*]] = cir.alloca !rec_C, !cir.ptr, ["b"] {alignment = 1 : i64}
-// CIR: cir.scope {
-// CIR: %[[V2:.*]] = cir.alloca !rec_C, !cir.ptr, ["agg.tmp0"] {alignment = 1 : i64}
-// CIR: cir.copy %[[V1]] to %[[V2]] : !cir.ptr
-// CIR: %[[V3:.*]] = cir.load{{.*}} %[[V2]] : !cir.ptr, !rec_C
-// CIR: cir.try synthetic cleanup {
-// CIR: cir.call exception @_ZN1CaSES_(%[[V0]], %[[V3]]) : (!cir.ptr, !rec_C) -> () cleanup {
-// CIR: cir.call @_ZN1CD1Ev(%[[V2]]) : (!cir.ptr) -> () extra(#fn_attr)
-// CIR: cir.call @_ZN1CD1Ev(%[[V1]]) : (!cir.ptr) -> () extra(#fn_attr)
+// CIR-NEXT: %[[V1:.*]] = cir.alloca !rec_C, !cir.ptr, ["b"] {alignment = 1 : i64}
+// CIR-NEXT: cir.scope {
+// CIR-NEXT: %[[V2:.*]] = cir.alloca !rec_C, !cir.ptr, ["agg.tmp0"] {alignment = 1 : i64}
+// CIR-NEXT: cir.copy %[[V1]] to %[[V2]] : !cir.ptr
+// CIR-NEXT: %[[V3:.*]] = cir.load{{.*}} %[[V2]] : !cir.ptr, !rec_C
+// CIR-NEXT: cir.try synthetic cleanup {
+// CIR-NEXT: cir.call exception @_ZN1CaSES_(%[[V0]], %[[V3]]) : (!cir.ptr, !rec_C) -> () cleanup {
+// CIR-NEXT: cir.call @_ZN1CD1Ev(%[[V2]]) : (!cir.ptr) -> () extra(#fn_attr)
+// CIR-NEXT: cir.call @_ZN1CD1Ev(%[[V1]]) : (!cir.ptr) -> () extra(#fn_attr)
+// CIR-NEXT: cir.yield
+// CIR-NEXT: }
+// CIR-NEXT: cir.yield
+// CIR-NEXT: } catch [#cir.unwind {
+// CIR-NEXT: cir.resume
+// CIR-NEXT: }]
+// CIR-NEXT: cir.call @_ZN1CD1Ev(%[[V2]]) : (!cir.ptr) -> () extra(#fn_attr)
+// CIR-NEXT: }
+// CIR-NEXT: cir.call @_ZN1CD1Ev(%[[V1]]) : (!cir.ptr) -> () extra(#fn_attr)
+// CIR-NEXT: cir.call @_ZN1CD1Ev(%[[V0]]) : (!cir.ptr) -> () extra(#fn_attr)
+// CIR-NEXT: cir.return
+
+template class a;
+
+template <> class a {
+public:
+  struct b {
+    typedef a c;
+  };
+};
+
+template class a {
+public:
+  template a(d) noexcept;
+  ~a();
+};
+
+struct e {
+  using f = a::b::c;
+};
+
+template using g = e::f;
+
+template void i(h);
+
+class j {
+
+public:
+  using k = g;
+};
+
+class l {
+public:
+  template l(m p1, n) : l(p1, 0, a()) {}
+  template l(m, n, h o) {
+    try {
+      j::k p(o);
+      i(p);
+    } catch (...) {
+    }
+  }
+};
+
+class G {
+public:
+  template G(q p1, n) : r(p1, 0) {}
+  l r;
+};
+
+class s : G {
+public:
+  int t;
+  s() : G(t, 0) {}
+};
+
+void fn3() { s(); }
+
+// CIR: cir.func linkonce_odr @_ZN1lC2Iii1aIvEEET_T0_T1_
+// CIR: cir.scope
+// CIR: %[[V5:.*]] = cir.alloca !rec_a3Cint3E, !cir.ptr
+// CIR: %[[V6:.*]] = cir.alloca !rec_a3Cvoid3E, !cir.ptr
+// CIR: cir.try {
+// CIR: cir.copy {{.*}} to %[[V6]] : !cir.ptr
+// CIR: %[[V7:.*]] = cir.load align(1) %[[V6]] : !cir.ptr, !rec_a3Cvoid3E
+// CIR: cir.call @_ZN1aIiEC1IS_IvEEET_(%[[V5]], %[[V7]]) : (!cir.ptr, !rec_a3Cvoid3E) -> ()
+// CIR: cir.scope {
+// CIR: %[[V8:.*]] = cir.alloca !rec_a3Cint3E, !cir.ptr
+// CIR: cir.copy %[[V5]] to %[[V8]] : !cir.ptr
+// CIR: %[[V9:.*]] = cir.load align(1) %[[V8]] : !cir.ptr, !rec_a3Cint3E
+// CIR-NEXT: cir.call exception @_Z1iI1aIiEEvT_(%[[V9]]) : (!rec_a3Cint3E) -> () cleanup {
+// CIR-NEXT: cir.call @_ZN1aIiED1Ev(%[[V8]]) : (!cir.ptr) -> ()
+// CIR-NEXT: cir.call @_ZN1aIiED1Ev(%[[V5]]) : (!cir.ptr) -> ()
+// CIR-NEXT: cir.yield
+// CIR-NEXT: }
+// CIR-NEXT: cir.call @_ZN1aIiED1Ev(%[[V8]]) : (!cir.ptr) -> ()
+// CIR-NEXT: }
+// CIR-NEXT: cir.call @_ZN1aIiED1Ev(%[[V5]]) : (!cir.ptr) -> ()
+// CIR-NEXT: cir.yield
+// CIR: } catch [type #cir.all {
+// CIR: %[[V7:.*]] = cir.catch_param -> !cir.ptr
 // CIR: cir.yield
-// CIR: }
-// CIR: cir.yield
-// CIR: } catch [#cir.unwind {
-// CIR: cir.resume
-// CIR: }]
-// CIR: cir.call @_ZN1CD1Ev(%[[V2]]) : (!cir.ptr) -> () extra(#fn_attr)
-// CIR: cir.call @_ZN1CD1Ev(%[[V1]]) : (!cir.ptr) -> () extra(#fn_attr)
-// CIR: }
-// CIR: cir.call @_ZN1CD1Ev(%[[V1]]) : (!cir.ptr) -> () extra(#fn_attr)
-// CIR: cir.call @_ZN1CD1Ev(%[[V0]]) : (!cir.ptr) -> () extra(#fn_attr)
-// CIR: cir.return
+// CIR: }]
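
The reduced scenario behind these test updates is a copy assignment whose by-value argument needs an exception cleanup. Below is a minimal sketch of that shape, inferred from the _ZN1CaSES_ and _ZN1CD1Ev CHECK lines above; the actual d() in try-catch-dtors.cpp may define C differently.

struct C {
  ~C();            // non-trivial destructor forces cleanup emission
  C &operator=(C); // by-value parameter materializes the "agg.tmp0" temporary
};

void d() {
  C a, b;
  // The temporary copy of `b` must be destroyed exactly once on both the
  // normal and the exceptional path; per the comment added in
  // CIRGenCleanup.cpp, cleanups at the end of CIR scopes are now skipped so
  // the same destructor call is not emitted twice for it.
  a = b;
}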