From 7a4f3a37d74c7a705d35cdd036b12a27d2bb896c Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Fri, 16 Jun 2023 13:17:20 -0700 Subject: [PATCH 1/9] [SILOpt] Don't opt move-only lifetimes. According to language rules, such lifetimes are fixed and the relative order of their deinits is guaranteed. rdar://110913116 --- .../Transforms/SSADestroyHoisting.cpp | 5 ++ .../Utils/CanonicalizeOSSALifetime.cpp | 6 +++ .../canonicalize_ossa_lifetime_unit.sil | 46 +++++++++++++++++++ test/SILOptimizer/hoist_destroy_addr.sil | 34 ++++++++++++++ 4 files changed, 91 insertions(+) diff --git a/lib/SILOptimizer/Transforms/SSADestroyHoisting.cpp b/lib/SILOptimizer/Transforms/SSADestroyHoisting.cpp index 9190b6ddbd367..dfe563f2b7a59 100644 --- a/lib/SILOptimizer/Transforms/SSADestroyHoisting.cpp +++ b/lib/SILOptimizer/Transforms/SSADestroyHoisting.cpp @@ -870,6 +870,11 @@ bool hoistDestroys(SILValue root, bool ignoreDeinitBarriers, BasicCalleeAnalysis *calleeAnalysis) { LLVM_DEBUG(llvm::dbgs() << "Performing destroy hoisting on " << root); + // Don't canonicalize the lifetimes of addresses of move-only type. + // According to language rules, they are fixed. + if (root->getType().isMoveOnly()) + return false; + SILFunction *function = root->getFunction(); if (!function) return false; diff --git a/lib/SILOptimizer/Utils/CanonicalizeOSSALifetime.cpp b/lib/SILOptimizer/Utils/CanonicalizeOSSALifetime.cpp index d3f1965847de7..771459ee6f66a 100644 --- a/lib/SILOptimizer/Utils/CanonicalizeOSSALifetime.cpp +++ b/lib/SILOptimizer/Utils/CanonicalizeOSSALifetime.cpp @@ -1084,6 +1084,12 @@ void CanonicalizeOSSALifetime::rewriteLifetimes() { bool CanonicalizeOSSALifetime::canonicalizeValueLifetime(SILValue def) { LivenessState livenessState(*this, def); + // Don't canonicalize the lifetimes of values of move-only type. According to + // language rules, they are fixed. + if (def->getType().isMoveOnly()) { + return false; + } + // Step 1: Compute liveness. 
if (!computeLiveness()) { LLVM_DEBUG(llvm::dbgs() << "Failed to compute liveness boundary!\n"); diff --git a/test/SILOptimizer/canonicalize_ossa_lifetime_unit.sil b/test/SILOptimizer/canonicalize_ossa_lifetime_unit.sil index 3bff1c200b14e..0b17d1b7bab31 100644 --- a/test/SILOptimizer/canonicalize_ossa_lifetime_unit.sil +++ b/test/SILOptimizer/canonicalize_ossa_lifetime_unit.sil @@ -2,6 +2,9 @@ class C {} +@_moveOnly struct MoS {} +@_moveOnly struct MoE {} + // When access scopes are respected, the lifetime which previously extended // beyond the access scope still extends beyond it. // CHECK-LABEL: begin running test 1 of 2 on retract_value_lifetime_into_access_scope_when_access_scopes_not_respected: canonicalize-ossa-lifetime with: true, false, true, @trace @@ -45,3 +48,46 @@ bb0(%addr : $*C): %retval = tuple () return %retval : $() } + +sil @empty : $@convention(thin) () -> () { +[global: ] +bb0: + %0 = tuple () + return %0 : $() +} + +// Even though the apply of %empty is not a deinit barrier, verify that the +// destroy is not hoisted, because MoS is move-only. 
+// CHECK-LABEL: begin running test {{.*}} on dont_move_destroy_value_of_moveonly_struct: canonicalize-ossa-lifetime with: true, false, true, @argument +// CHECK-LABEL: sil [ossa] @dont_move_destroy_value_of_moveonly_struct : {{.*}} { +// CHECK: {{bb[0-9]+}}([[INSTANCE:%[^,]+]] : +// CHECK: apply +// CHECK: destroy_value [[INSTANCE]] +// CHECK-LABEL: } // end sil function 'dont_move_destroy_value_of_moveonly_struct' +// CHECK-LABEL: end running test {{.*}} on dont_move_destroy_value_of_moveonly_struct: canonicalize-ossa-lifetime with: true, false, true, @argument +sil [ossa] @dont_move_destroy_value_of_moveonly_struct : $@convention(thin) (@owned MoS) -> () { +entry(%instance : @owned $MoS): + test_specification "canonicalize-ossa-lifetime true false true @argument" + %empty = function_ref @empty : $@convention(thin) () -> () + apply %empty() : $@convention(thin) () -> () + destroy_value %instance : $MoS + %retval = tuple () + return %retval : $() +} + +// CHECK-LABEL: begin running test {{.*}} on dont_move_destroy_value_of_moveonly_enum: canonicalize-ossa-lifetime with: true, false, true, @argument +// CHECK-LABEL: sil [ossa] @dont_move_destroy_value_of_moveonly_enum : {{.*}} { +// CHECK: {{bb[0-9]+}}([[INSTANCE:%[^,]+]] : +// CHECK: apply +// CHECK: destroy_value [[INSTANCE]] +// CHECK-LABEL: } // end sil function 'dont_move_destroy_value_of_moveonly_enum' +// CHECK-LABEL: end running test {{.*}} on dont_move_destroy_value_of_moveonly_enum: canonicalize-ossa-lifetime with: true, false, true, @argument +sil [ossa] @dont_move_destroy_value_of_moveonly_enum : $@convention(thin) (@owned MoE) -> () { +entry(%instance : @owned $MoE): + test_specification "canonicalize-ossa-lifetime true false true @argument" + %empty = function_ref @empty : $@convention(thin) () -> () + apply %empty() : $@convention(thin) () -> () + destroy_value %instance : $MoE + %retval = tuple () + return %retval : $() +} diff --git a/test/SILOptimizer/hoist_destroy_addr.sil 
b/test/SILOptimizer/hoist_destroy_addr.sil index 9ec2aa2f038b4..0131014cffce0 100644 --- a/test/SILOptimizer/hoist_destroy_addr.sil +++ b/test/SILOptimizer/hoist_destroy_addr.sil @@ -79,6 +79,9 @@ struct STXXITXXII { var i: I } +@_moveOnly struct MoS {} +@_moveOnly struct MoE {} + sil @unknown : $@convention(thin) () -> () sil @use_S : $@convention(thin) (@in_guaranteed S) -> () @@ -1145,3 +1148,34 @@ entry(%addr : $*X): %retval = tuple () return %retval : $() } + +// Even though the apply of %empty is not a deinit barrier (c.f. +// hoist_over_apply_of_non_barrier_fn), verify that the destroy_addr is not +// hoisted, because MoS is move-only. +// CHECK-LABEL: sil [ossa] @dont_move_destroy_addr_of_moveonly_struct : {{.*}} { +// CHECK: {{bb[0-9]+}}([[ADDR:%[^,]+]] : +// CHECK: apply +// CHECK: destroy_addr [[ADDR]] +// CHECK-LABEL: } // end sil function 'dont_move_destroy_addr_of_moveonly_struct' +sil [ossa] @dont_move_destroy_addr_of_moveonly_struct : $@convention(thin) (@in MoS) -> () { +entry(%addr : $*MoS): + %empty = function_ref @empty : $@convention(thin) () -> () + apply %empty() : $@convention(thin) () -> () + destroy_addr %addr : $*MoS + %retval = tuple () + return %retval : $() +} + +// CHECK-LABEL: sil [ossa] @dont_move_destroy_addr_of_moveonly_enum : {{.*}} { +// CHECK: {{bb[0-9]+}}([[ADDR:%[^,]+]] : +// CHECK: apply +// CHECK: destroy_addr [[ADDR]] +// CHECK-LABEL: } // end sil function 'dont_move_destroy_addr_of_moveonly_enum' +sil [ossa] @dont_move_destroy_addr_of_moveonly_enum : $@convention(thin) (@in MoE) -> () { +entry(%addr : $*MoE): + %empty = function_ref @empty : $@convention(thin) () -> () + apply %empty() : $@convention(thin) () -> () + destroy_addr %addr : $*MoE + %retval = tuple () + return %retval : $() +} From d71121b7564e48b7968864e46741b036f4bc1da1 Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Thu, 8 Jun 2023 10:56:54 -0700 Subject: [PATCH 2/9] [FieldSensitivePL] NFC: Implemented print. The members were declared but undefined. 
--- .../Utils/FieldSensitivePrunedLiveness.cpp | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp b/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp index 27cbaebc45098..93d1893a6f4c7 100644 --- a/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp +++ b/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp @@ -563,6 +563,38 @@ void FieldSensitivePrunedLiveBlocks::print(llvm::raw_ostream &OS) const { void FieldSensitivePrunedLiveBlocks::dump() const { print(llvm::dbgs()); } +//===----------------------------------------------------------------------===// +// FieldSensitivePrunedLivenessBoundary +//===----------------------------------------------------------------------===// + +void FieldSensitivePrunedLivenessBoundary::print(llvm::raw_ostream &OS) const { + for (auto pair : lastUsers) { + auto *user = pair.first; + auto bits = pair.second; + OS << "last user: " << *user + << "\tat " << bits << "\n"; + } + for (auto pair : boundaryEdges) { + auto *block = pair.first; + auto bits = pair.second; + OS << "boundary edge: "; + block->printAsOperand(OS); + OS << "\n" << "\tat " << bits << "\n"; + } + if (!deadDefs.empty()) { + for (auto pair : deadDefs) { + auto *deadDef = pair.first; + auto bits = pair.second; + OS << "dead def: " << *deadDef + << "\tat " << bits << "\n"; + } + } +} + +void FieldSensitivePrunedLivenessBoundary::dump() const { + print(llvm::dbgs()); +} + //===----------------------------------------------------------------------===// // MARK: FieldSensitiveLiveness //===----------------------------------------------------------------------===// From b5d306f7097ab87dc5e2e995b01d9826d22cb405 Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Fri, 16 Jun 2023 11:35:20 -0700 Subject: [PATCH 3/9] [Basic] Removed spurious req from FrozenMultiMap. Its storage vector is intended to be of some type like `std::vector>>`, i.e., some collection of pairs whose `second` is an `Optional`. 
So when constructing a default instance of that pair, just construct an Optional in the None case. --- include/swift/Basic/FrozenMultiMap.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/include/swift/Basic/FrozenMultiMap.h b/include/swift/Basic/FrozenMultiMap.h index 617fd64407266..a36f3e4704b1d 100644 --- a/include/swift/Basic/FrozenMultiMap.h +++ b/include/swift/Basic/FrozenMultiMap.h @@ -71,7 +71,7 @@ class FrozenMultiMap { // Since our array is sorted, we need to first find the first pair with our // inst as the first element. auto start = std::lower_bound( - storage.begin(), storage.end(), std::make_pair(key, Value()), + storage.begin(), storage.end(), std::make_pair(key, llvm::None), [&](const std::pair> &p1, const std::pair> &p2) { return p1.first < p2.first; From 4c02d779c497def27ea6522d726be59ada1eb3d2 Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Fri, 16 Jun 2023 15:21:36 -0700 Subject: [PATCH 4/9] [FieldSensitivePL] Fix vectorization. FieldSensitivePrunedLiveness is used as a vectorization of PrunedLiveness. An instance of FSPL with N elements needs to be able to represent the same states as N instances of PL. Previously, it failed to do that in two significant ways: (1) It attempted to save space for which elements were live by using a range. This failed to account for instructions which are users of non-contiguous fields of an aggregate. apply( @owned (struct_element_addr %s, #S.f1), @owned (struct_element_addr %s, #S.f3) ) (2) It used a single bit to represent whether the instruction was consuming. This failed to account for instructions which consumed some fields and borrowed others. apply( @owned (struct_element_addr %s, #S.f1), @guaranteed (struct_element_addr %s, #S.f2) ) The fix for (1) is to use a bit vector to represent which elements are used by the instruction. The fix for (2) is to use a second bit vector to represent which elements are _consumed_ by the instruction. 
Adapted the move-checker to use the new representation. rdar://110909290 --- .../swift/SIL/FieldSensitivePrunedLiveness.h | 151 ++++++++++------- .../Utils/FieldSensitivePrunedLiveness.cpp | 13 +- .../Mandatory/MoveOnlyAddressCheckerUtils.cpp | 158 ++++++++++-------- .../MoveOnlyBorrowToDestructureUtils.cpp | 28 ++-- test/SILOptimizer/moveonly_addresschecker.sil | 117 +++++++++++++ 5 files changed, 319 insertions(+), 148 deletions(-) diff --git a/include/swift/SIL/FieldSensitivePrunedLiveness.h b/include/swift/SIL/FieldSensitivePrunedLiveness.h index 1728e0955bd3d..456ba6c387bee 100644 --- a/include/swift/SIL/FieldSensitivePrunedLiveness.h +++ b/include/swift/SIL/FieldSensitivePrunedLiveness.h @@ -358,6 +358,13 @@ struct TypeTreeLeafTypeRange { endEltOffset >= range.endEltOffset; } + /// Sets each bit in \p bits corresponding to an element of this range. + void setBits(SmallBitVector &bits) { + for (auto element : getRange()) { + bits.set(element); + } + } + IntRange getRange() const { return range(startEltOffset, endEltOffset); } @@ -666,17 +673,60 @@ class FieldSensitivePrunedLiveness { FieldSensitivePrunedLiveBlocks liveBlocks; public: + enum IsInterestingUser { NonUser, NonLifetimeEndingUse, LifetimeEndingUse }; + struct InterestingUser { - TypeTreeLeafTypeRange subEltSpan; - bool isConsuming; + SmallBitVector liveBits; + SmallBitVector consumingBits; - InterestingUser() : subEltSpan(), isConsuming(false) {} - InterestingUser(TypeTreeLeafTypeRange subEltSpan, bool isConsuming) - : subEltSpan(subEltSpan), isConsuming(isConsuming) {} + InterestingUser(unsigned bitCount) + : liveBits(bitCount), consumingBits(bitCount) {} - InterestingUser &operator&=(bool otherValue) { - isConsuming &= otherValue; - return *this; + InterestingUser(unsigned bitCount, TypeTreeLeafTypeRange range, + bool lifetimeEnding) + : liveBits(bitCount), consumingBits(bitCount) { + addUses(range, lifetimeEnding); + } + + /// Record that the instruction uses the bits of the value in \p range. 
+ void addUses(TypeTreeLeafTypeRange range, bool lifetimeEnding) { + range.setBits(liveBits); + if (lifetimeEnding) { + range.setBits(consumingBits); + } + } + + /// Populates the provided vector with contiguous ranges of bits which are + /// users of the same sort. + void getContiguousRanges( + SmallVectorImpl> + &ranges) const { + if (liveBits.size() == 0) + return; + + assert(ranges.empty()); + Optional> current = llvm::None; + for (unsigned bit = 0, size = liveBits.size(); bit < size; ++bit) { + auto interesting = isInterestingUser(bit); + if (!current) { + current = {bit, interesting}; + continue; + } + if (current->second != interesting) { + ranges.push_back( + {TypeTreeLeafTypeRange(current->first, bit), current->second}); + current = {bit, interesting}; + } + } + ranges.push_back({TypeTreeLeafTypeRange(current->first, liveBits.size()), + current->second}); + } + + IsInterestingUser isInterestingUser(unsigned element) const { + if (!liveBits.test(element)) + return NonUser; + return consumingBits.test(element) ? 
LifetimeEndingUse + : NonLifetimeEndingUse; } }; @@ -758,42 +808,6 @@ class FieldSensitivePrunedLiveness { return llvm::make_range(users.begin(), users.end()); } - using LifetimeEndingUserRange = OptionalTransformRange< - UserRange, - function_ref>( - const std::pair &)>>; - LifetimeEndingUserRange getAllLifetimeEndingUses() const { - assert(isInitialized()); - function_ref>( - const std::pair &)> - op; - op = [](const std::pair &pair) - -> Optional> { - if (pair.second.isConsuming) - return {{pair.first, pair.second.subEltSpan}}; - return None; - }; - return LifetimeEndingUserRange(getAllUsers(), op); - } - - using NonLifetimeEndingUserRange = OptionalTransformRange< - UserRange, - function_ref>( - const std::pair &)>>; - NonLifetimeEndingUserRange getAllNonLifetimeEndingUses() const { - assert(isInitialized()); - function_ref>( - const std::pair &)> - op; - op = [](const std::pair &pair) - -> Optional> { - if (!pair.second.isConsuming) - return {{pair.first, pair.second.subEltSpan}}; - return None; - }; - return NonLifetimeEndingUserRange(getAllUsers(), op); - } - using UserBlockRange = TransformRange< UserRange, function_ref &)>>; @@ -848,19 +862,37 @@ class FieldSensitivePrunedLiveness { SmallBitVector &liveOutBits, SmallBitVector &deadBits) const; - enum IsInterestingUser { NonUser, NonLifetimeEndingUse, LifetimeEndingUse }; + /// If \p user has had uses recorded, return a pointer to the InterestingUser + /// where they've been recorded. + InterestingUser const *getInterestingUser(SILInstruction *user) const { + auto iter = users.find(user); + if (iter == users.end()) + return nullptr; + return &iter->second; + } - /// Return a result indicating whether the given user was identified as an - /// interesting use of the current def and whether it ends the lifetime. - std::pair> - isInterestingUser(SILInstruction *user) const { + /// How \p user uses the field at \p element. 
+ IsInterestingUser isInterestingUser(SILInstruction *user, + unsigned element) const { assert(isInitialized()); - auto useIter = users.find(user); - if (useIter == users.end()) - return {NonUser, None}; - auto isInteresting = - useIter->second.isConsuming ? LifetimeEndingUse : NonLifetimeEndingUse; - return {isInteresting, useIter->second.subEltSpan}; + auto *record = getInterestingUser(user); + if (!record) + return NonUser; + return record->isInterestingUser(element); + } + + /// Whether \p user uses the fields in \p range as indicated by \p kind. + bool isInterestingUserOfKind(SILInstruction *user, IsInterestingUser kind, + TypeTreeLeafTypeRange range) const { + auto *record = getInterestingUser(user); + if (!record) + return kind == IsInterestingUser::NonUser; + + for (auto element : range.getRange()) { + if (isInterestingUser(user, element) != kind) + return false; + } + return true; } unsigned getNumSubElements() const { return liveBlocks.getNumBitsToTrack(); } @@ -886,10 +918,11 @@ class FieldSensitivePrunedLiveness { /// argument must be copied. void addInterestingUser(SILInstruction *user, TypeTreeLeafTypeRange range, bool lifetimeEnding) { - auto iterAndSuccess = - users.insert({user, InterestingUser(range, lifetimeEnding)}); - if (!iterAndSuccess.second) - iterAndSuccess.first->second &= lifetimeEnding; + auto iter = users.find(user); + if (iter == users.end()) { + iter = users.insert({user, InterestingUser(getNumSubElements())}).first; + } + iter->second.addUses(range, lifetimeEnding); } }; diff --git a/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp b/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp index 93d1893a6f4c7..e80dbac5946d0 100644 --- a/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp +++ b/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp @@ -705,9 +705,7 @@ bool FieldSensitivePrunedLiveRange::isWithinBoundary( // If we are not live and have an interesting user that maps to our bit, // mark this bit as being live again. 
if (!isLive) { - auto interestingUser = isInterestingUser(&blockInst); - bool isInteresting = - interestingUser.first && interestingUser.second->contains(bit); + bool isInteresting = isInterestingUser(&blockInst, bit); PRUNED_LIVENESS_LOG(llvm::dbgs() << " Inst was dead... Is InterestingUser: " << (isInteresting ? "true" : "false") << '\n'); @@ -838,8 +836,7 @@ void findBoundaryInNonDefBlock(SILBasicBlock *block, unsigned bitNo, PRUNED_LIVENESS_LOG(llvm::dbgs() << "Looking for boundary in non-def block\n"); for (SILInstruction &inst : llvm::reverse(*block)) { PRUNED_LIVENESS_LOG(llvm::dbgs() << "Visiting: " << inst); - auto interestingUser = liveness.isInterestingUser(&inst); - if (interestingUser.first && interestingUser.second->contains(bitNo)) { + if (liveness.isInterestingUser(&inst, bitNo)) { PRUNED_LIVENESS_LOG(llvm::dbgs() << " Is interesting user for this bit!\n"); boundary.getLastUserBits(&inst).set(bitNo); return; @@ -869,8 +866,7 @@ void findBoundaryInSSADefBlock(SILNode *ssaDef, unsigned bitNo, boundary.getDeadDefsBits(cast(&inst)).set(bitNo); return; } - auto interestingUser = liveness.isInterestingUser(&inst); - if (interestingUser.first && interestingUser.second->contains(bitNo)) { + if (liveness.isInterestingUser(&inst, bitNo)) { PRUNED_LIVENESS_LOG(llvm::dbgs() << " Found interesting user: " << inst); boundary.getLastUserBits(&inst).set(bitNo); return; @@ -1005,8 +1001,7 @@ void FieldSensitiveMultiDefPrunedLiveRange::findBoundariesInBlock( PRUNED_LIVENESS_LOG(llvm::dbgs() << " Checking if this inst is also a last user...\n"); if (!isLive) { - auto interestingUser = isInterestingUser(&inst); - if (interestingUser.first && interestingUser.second->contains(bitNo)) { + if (isInterestingUser(&inst, bitNo)) { PRUNED_LIVENESS_LOG( llvm::dbgs() << " Was interesting user! 
Moving from dead -> live!\n"); diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp index 330931afa0507..bb9ab67d4bcb2 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp @@ -1150,9 +1150,8 @@ namespace { struct ConsumeInfo { /// Map blocks on the lifetime boundary to the last consuming instruction. - llvm::MapVector< - SILBasicBlock *, - SmallVector, 1>> + llvm::MapVector, 1>> finalBlockConsumes; bool isFrozen = false; @@ -1167,8 +1166,7 @@ struct ConsumeInfo { if (!inst) continue; os << "Inst: " << *inst; - os << "Range: "; - instRangePairVector.second.dump(); + os << "Range: " << instRangePairVector.second; os << '\n'; } } @@ -1191,13 +1189,15 @@ struct ConsumeInfo { return foundAny; } - void recordFinalConsume(SILInstruction *inst, TypeTreeLeafTypeRange span) { + void recordFinalConsume(SILInstruction *inst, SmallBitVector const &bits) { assert(!isFrozen); - auto iter = finalBlockConsumes.insert({inst->getParent(), {{inst, span}}}); - if (iter.second) - return; + auto *block = inst->getParent(); + auto iter = finalBlockConsumes.find(block); + if (iter == finalBlockConsumes.end()) { + iter = finalBlockConsumes.insert({block, {}}).first; + } LLVM_DEBUG(llvm::dbgs() << "Recorded Final Consume: " << *inst); - iter.first->second.emplace_back(inst, span); + iter->second.emplace_back(inst, bits); } void finishRecordingFinalConsumes() { @@ -1205,8 +1205,8 @@ struct ConsumeInfo { for (auto &pair : finalBlockConsumes) { llvm::stable_sort( pair.second, - [](const std::pair &lhs, - const std::pair &rhs) { + [](const std::pair &lhs, + const std::pair &rhs) { return lhs.first < rhs.first; }); } @@ -1222,7 +1222,7 @@ struct ConsumeInfo { // operands. // // Can only be used once frozen. 
- bool claimConsume(SILInstruction *inst, TypeTreeLeafTypeRange range) { + bool claimConsume(SILInstruction *inst, SmallBitVector const &bits) { assert(isFrozen); bool claimedConsume = false; @@ -1230,7 +1230,7 @@ struct ConsumeInfo { auto &iter = finalBlockConsumes[inst->getParent()]; for (unsigned i : indices(iter)) { auto &instRangePair = iter[i]; - if (instRangePair.first == inst && instRangePair.second == range) { + if (instRangePair.first == inst && instRangePair.second == bits) { instRangePair.first = nullptr; claimedConsume = true; LLVM_DEBUG(llvm::dbgs() << "Claimed consume: " << *inst); @@ -2183,9 +2183,9 @@ bool GlobalLivenessChecker::testInstVectorLiveness( // array and emit an error on those instead since it would be a better // error than using end_borrow here. { - auto pair = liveness.isInterestingUser(&*ii); - if (pair.first == FieldSensitivePrunedLiveness::NonLifetimeEndingUse && - pair.second->contains(errorSpan)) { + if (liveness.isInterestingUserOfKind( + &*ii, FieldSensitivePrunedLiveness::NonLifetimeEndingUse, + errorSpan)) { diagnosticEmitter.emitAddressDiagnostic( addressUseState.address, &*ii, errorUser, false /*is consuming*/, addressUseState.isInOutTermUser(&*ii)); @@ -2361,7 +2361,7 @@ static void insertDestroyBeforeInstruction(UseState &addressUseState, // claim that destroy instead of inserting another destroy_addr. 
if (auto *dai = dyn_cast(nextInstruction)) { if (dai->getOperand() == baseAddress) { - consumes.recordFinalConsume(dai, TypeTreeLeafTypeRange(0, bv.size())); + consumes.recordFinalConsume(dai, bv); return; } } @@ -2371,7 +2371,7 @@ static void insertDestroyBeforeInstruction(UseState &addressUseState, auto loc = RegularLocation::getAutoGeneratedLocation(nextInstruction->getLoc()); auto *dai = builder.createDestroyAddr(loc, baseAddress); - consumes.recordFinalConsume(dai, TypeTreeLeafTypeRange(0, bv.size())); + consumes.recordFinalConsume(dai, bv); addressUseState.destroys.insert({dai, TypeTreeLeafTypeRange(0, bv.size())}); return; } @@ -2389,7 +2389,9 @@ static void insertDestroyBeforeInstruction(UseState &addressUseState, if (pair.first->getType().isTrivial(*nextInstruction->getFunction())) continue; auto *dai = builder.createDestroyAddr(loc, pair.first); - consumes.recordFinalConsume(dai, pair.second); + SmallBitVector consumedBits(bv.size()); + pair.second.setBits(consumedBits); + consumes.recordFinalConsume(dai, consumedBits); addressUseState.destroys.insert({dai, pair.second}); } } @@ -2437,52 +2439,67 @@ void MoveOnlyAddressCheckerPImpl::insertDestroysOnBoundary( LLVM_DEBUG(llvm::dbgs() << " User: " << *inst); - auto interestingUse = liveness.isInterestingUser(inst); - switch (interestingUse.first) { - case IsInterestingUser::LifetimeEndingUse: { - LLVM_DEBUG(llvm::dbgs() - << " Lifetime ending use! Recording final consume!\n"); - // If we have a consuming use, when we stop at the consuming use we want - // the value to still be around. We only want the value to be invalidated - // once the consume operation has occured. Thus we always place the - // debug_value undef strictly after the consuming operation. 
- if (auto *ti = dyn_cast(inst)) { - for (auto *succBlock : ti->getSuccessorBlocks()) { - insertUndefDebugValue(&succBlock->front()); - } - } else { - insertUndefDebugValue(inst->getNextInstruction()); - } - consumes.recordFinalConsume(inst, *interestingUse.second); - continue; + auto interestingUser = liveness.getInterestingUser(inst); + SmallVector, 4> ranges; + if (interestingUser) { + interestingUser->getContiguousRanges(ranges); } - case IsInterestingUser::NonLifetimeEndingUse: - case IsInterestingUser::NonUser: - LLVM_DEBUG(llvm::dbgs() << " NoneUser or NonLifetimeEndingUse! " - "inserting destroy before instruction!\n"); - // If we are dealing with an inout parameter, we will have modeled our - // last use by treating a return inst as a last use. Since it doesn't have - // any successors, this results in us not inserting any destroy_addr. - if (isa(inst)) { - auto *block = inst->getParent(); - for (auto *succBlock : block->getSuccessorBlocks()) { - auto *insertPt = &*succBlock->begin(); - insertDestroyBeforeInstruction(addressUseState, insertPt, - liveness.getRootValue(), bv, consumes); - // We insert the debug_value undef /after/ the last use since we want - // the value to be around when we stop at the last use instruction. - insertUndefDebugValue(insertPt); + + for (auto rangePair : ranges) { + SmallBitVector bits(bv.size()); + rangePair.first.setBits(bits); + switch (rangePair.second) { + case IsInterestingUser::LifetimeEndingUse: { + LLVM_DEBUG( + llvm::dbgs() + << " Lifetime ending use! Recording final consume!\n"); + // If we have a consuming use, when we stop at the consuming use we want + // the value to still be around. We only want the value to be + // invalidated once the consume operation has occured. Thus we always + // place the debug_value undef strictly after the consuming operation. 
+ if (auto *ti = dyn_cast(inst)) { + for (auto *succBlock : ti->getSuccessorBlocks()) { + insertUndefDebugValue(&succBlock->front()); + } + } else { + insertUndefDebugValue(inst->getNextInstruction()); } + consumes.recordFinalConsume(inst, bits); continue; } + case IsInterestingUser::NonUser: + break; + case IsInterestingUser::NonLifetimeEndingUse: + LLVM_DEBUG(llvm::dbgs() << " NonLifetimeEndingUse! " + "inserting destroy before instruction!\n"); + // If we are dealing with an inout parameter, we will have modeled our + // last use by treating a return inst as a last use. Since it doesn't + // have any successors, this results in us not inserting any + // destroy_addr. + if (isa(inst)) { + auto *block = inst->getParent(); + for (auto *succBlock : block->getSuccessorBlocks()) { + + auto *insertPt = &*succBlock->begin(); + insertDestroyBeforeInstruction(addressUseState, insertPt, + liveness.getRootValue(), bits, + consumes); + // We insert the debug_value undef /after/ the last use since we + // want the value to be around when we stop at the last use + // instruction. + insertUndefDebugValue(insertPt); + } + continue; + } - auto *insertPt = inst->getNextInstruction(); - insertDestroyBeforeInstruction(addressUseState, insertPt, - liveness.getRootValue(), bv, consumes); - // We insert the debug_value undef /after/ the last use since we want - // the value to be around when we stop at the last use instruction. - insertUndefDebugValue(insertPt); - continue; + auto *insertPt = inst->getNextInstruction(); + insertDestroyBeforeInstruction(addressUseState, insertPt, + liveness.getRootValue(), bits, consumes); + // We insert the debug_value undef /after/ the last use since we want + // the value to be around when we stop at the last use instruction. 
+ insertUndefDebugValue(insertPt); + continue; + } } } @@ -2545,8 +2562,9 @@ void MoveOnlyAddressCheckerPImpl::rewriteUses( // Process destroys for (auto destroyPair : addressUseState.destroys) { /// Is this destroy instruction a final consuming use? - bool isFinalConsume = - consumes.claimConsume(destroyPair.first, destroyPair.second); + SmallBitVector bits(liveness.getNumSubElements()); + destroyPair.second.setBits(bits); + bool isFinalConsume = consumes.claimConsume(destroyPair.first, bits); // Remove destroys that are not the final consuming use. if (!isFinalConsume) { @@ -2583,22 +2601,26 @@ void MoveOnlyAddressCheckerPImpl::rewriteUses( for (auto reinitPair : addressUseState.reinitInsts) { if (!isReinitToInitConvertibleInst(reinitPair.first)) continue; - if (!consumes.claimConsume(reinitPair.first, reinitPair.second)) + SmallBitVector bits(liveness.getNumSubElements()); + reinitPair.second.setBits(bits); + if (!consumes.claimConsume(reinitPair.first, bits)) convertMemoryReinitToInitForm(reinitPair.first, debugVar); } // Check all takes. for (auto takeInst : addressUseState.takeInsts) { - bool claimedConsume = - consumes.claimConsume(takeInst.first, takeInst.second); + SmallBitVector bits(liveness.getNumSubElements()); + takeInst.second.setBits(bits); + bool claimedConsume = consumes.claimConsume(takeInst.first, bits); (void)claimedConsume; assert(claimedConsume && "Should claim all copies?!"); } // Then rewrite all copy insts to be takes and claim them. 
for (auto copyInst : addressUseState.copyInsts) { - bool claimedConsume = - consumes.claimConsume(copyInst.first, copyInst.second); + SmallBitVector bits(liveness.getNumSubElements()); + copyInst.second.setBits(bits); + bool claimedConsume = consumes.claimConsume(copyInst.first, bits); if (!claimedConsume) { llvm::errs() << "Found consume that was not recorded as a 'claimed consume'!\n"; diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyBorrowToDestructureUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyBorrowToDestructureUtils.cpp index 9f01ba50013d9..b328701d6fcbb 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyBorrowToDestructureUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyBorrowToDestructureUtils.cpp @@ -317,9 +317,10 @@ bool Implementation::gatherUses(SILValue value) { } LLVM_DEBUG(llvm::dbgs() << " Found non lifetime ending use!\n"); - blocksToUses.insert( - nextUse->getParentBlock(), - {nextUse, {*leafRange, false /*is lifetime ending*/}}); + blocksToUses.insert(nextUse->getParentBlock(), + {nextUse, + {liveness.getNumSubElements(), *leafRange, + false /*is lifetime ending*/}}); liveness.updateForUse(nextUse->getUser(), *leafRange, false /*is lifetime ending*/); instToInterestingOperandIndexMap.insert(nextUse->getUser(), nextUse); @@ -344,7 +345,9 @@ bool Implementation::gatherUses(SILValue value) { LLVM_DEBUG(llvm::dbgs() << " Found lifetime ending use!\n"); destructureNeedingUses.push_back(nextUse); blocksToUses.insert(nextUse->getParentBlock(), - {nextUse, {*leafRange, true /*is lifetime ending*/}}); + {nextUse, + {liveness.getNumSubElements(), *leafRange, + true /*is lifetime ending*/}}); liveness.updateForUse(nextUse->getUser(), *leafRange, true /*is lifetime ending*/); instToInterestingOperandIndexMap.insert(nextUse->getUser(), nextUse); @@ -381,9 +384,10 @@ bool Implementation::gatherUses(SILValue value) { // Otherwise, treat it as a normal use. 
LLVM_DEBUG(llvm::dbgs() << " Treating non-begin_borrow borrow as " "a non lifetime ending use!\n"); - blocksToUses.insert( - nextUse->getParentBlock(), - {nextUse, {*leafRange, false /*is lifetime ending*/}}); + blocksToUses.insert(nextUse->getParentBlock(), + {nextUse, + {liveness.getNumSubElements(), *leafRange, + false /*is lifetime ending*/}}); liveness.updateForUse(nextUse->getUser(), *leafRange, false /*is lifetime ending*/); instToInterestingOperandIndexMap.insert(nextUse->getUser(), nextUse); @@ -989,13 +993,13 @@ void Implementation::rewriteUses(InstructionDeleter *deleter) { if (auto operandList = blocksToUses.find(block)) { // If we do, gather up the bits that we need. for (auto operand : *operandList) { - auto &subEltSpan = operand.second.subEltSpan; + auto &liveBits = operand.second.liveBits; LLVM_DEBUG(llvm::dbgs() << " Found need operand " << operand.first->getOperandNumber() - << " of inst: " << *operand.first->getUser() - << " Needs bits: " << subEltSpan << '\n'); - bitsNeededInBlock.set(subEltSpan.startEltOffset, - subEltSpan.endEltOffset); + << " of inst: " << *operand.first->getUser()); + for (auto bit : liveBits.set_bits()) { + bitsNeededInBlock.set(bit); + } seenOperands.insert(operand.first); } } diff --git a/test/SILOptimizer/moveonly_addresschecker.sil b/test/SILOptimizer/moveonly_addresschecker.sil index 65b098e7c3418..da86665e997b7 100644 --- a/test/SILOptimizer/moveonly_addresschecker.sil +++ b/test/SILOptimizer/moveonly_addresschecker.sil @@ -575,3 +575,120 @@ bb0(%0 : @owned $NonTrivialStruct): %9999 = tuple() return %9999 : $() } + +@_moveOnly +struct M { + deinit {} +} +@_moveOnly +struct M2 { + let s1: M + let s2: M +} + +sil @get_M2 : $@convention(thin) () -> @owned M2 +sil @end_addr_see_addr : $@convention(thin) (@in M, @in_guaranteed M) -> () + +/// A single instruction, apply @end_addr_see_addr, consumes one field and +/// borrows another. 
+
+/// Verify that the consumed value isn't destroyed twice and that the borrowed
+/// value isn't destroyed before it's used.
+///
+/// Note: This test case doesn't have valid SIL (#M2.s1 is consumed twice), but
+/// the invalidity wasn't the cause of the miscompile. With the fix, this
+/// is transformed into valid SIL.
+///
+/// Once verification is enabled, feel free to modify this test case to
+/// have a destroy_addr of %second_addr instead, though note that this will
+/// no longer verify the fix.
+// CHECK-LABEL: sil [ossa] @rdar110909290 : {{.*}} {
+// CHECK: [[STACK:%[^,]+]] = alloc_stack $M2
+// CHECK: [[GET_M2:%[^,]+]] = function_ref @get_M2
+// CHECK: [[M2:%[^,]+]] = apply [[GET_M2]]()
+// CHECK: store [[M2]] to [init] [[STACK]] : $*M2
+// CHECK-NOT: destroy_addr
+// CHECK: [[S1_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M2, #M2.s1
+// CHECK: [[S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M2, #M2.s2
+// CHECK: [[END_ADDR_SEE_ADDR:%[^,]+]] = function_ref @end_addr_see_addr
+// CHECK: apply [[END_ADDR_SEE_ADDR]]([[S1_ADDR]], [[S2_ADDR]])
+// CHECK-NOT: struct_element_addr [[STACK]] : $*M2, #M2.s1
+// CHECK: [[S2_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M2, #M2.s2
+// CHECK: destroy_addr [[S2_ADDR_2]] : $*M
+// CHECK-LABEL: } // end sil function 'rdar110909290'
+sil [ossa] @rdar110909290 : $@convention(thin) () -> () {
+bb0:
+ %0 = alloc_stack $M2
+ %1 = mark_must_check [consumable_and_assignable] %0 : $*M2
+ %3 = function_ref @get_M2 : $@convention(thin) () -> @owned M2
+ %4 = apply %3() : $@convention(thin) () -> @owned M2
+ store %4 to [init] %1 : $*M2
+ %first_addr = struct_element_addr %1 : $*M2, #M2.s1
+ %second_addr = struct_element_addr %1 : $*M2, #M2.s2
+ %end_addr_see_addr = function_ref @end_addr_see_addr : $@convention(thin) (@in M, @in_guaranteed M) -> ()
+ apply %end_addr_see_addr(%first_addr, %second_addr) : $@convention(thin) (@in M, @in_guaranteed M) -> ()
+ destroy_addr %1 : $*M2
+ dealloc_stack %0 : $*M2
+ %22 = 
tuple ()
+ return %22 : $()
+}
+
+@_moveOnly
+struct M4 {
+ let s1: M
+ let s2: M
+ let s3: M
+ let s4: M
+}
+
+sil @get_M4 : $@convention(thin) () -> @owned M4
+sil @end_2 : $@convention(thin) (@owned M, @owned M) -> ()
+sil @see_addr_2 : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> ()
+
+
+/// Two non-contiguous fields (#M4.s2, #M4.s4) are borrowed by @see_addr_2.
+/// Two non-contiguous fields (#M4.s1, #M4.s3) are consumed by @end_2.
+///
+/// Verify that #M4.s2 and #M4.s4 both survive past the apply of @see_addr_2.
+// CHECK-LABEL: sil [ossa] @rdar110676577 : {{.*}} {
+// CHECK: [[STACK:%[^,]+]] = alloc_stack $M4
+// CHECK: [[GET_M4:%[^,]+]] = function_ref @get_M4
+// CHECK: [[M4:%[^,]+]] = apply [[GET_M4]]() : $@convention(thin) () -> @owned M4
+// CHECK: store [[M4]] to [init] [[STACK]] : $*M4
+// CHECK: [[M4_S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s2
+// CHECK: [[M4_S4_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4
+// CHECK: [[SEE_ADDR_2:%[^,]+]] = function_ref @see_addr_2
+// CHECK: apply [[SEE_ADDR_2]]([[M4_S2_ADDR]], [[M4_S4_ADDR]])
+// HECK: [[M4_S4_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4
+// HECK: destroy_addr [[M4_S4_ADDR_2]]
+// CHECK: [[M4_S2_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s2
+// CHECK: destroy_addr [[M4_S2_ADDR_2]]
+// CHECK: [[M4_S1_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s1
+// CHECK: [[M4_S1:%[^,]+]] = load [take] [[M4_S1_ADDR]] : $*M
+// CHECK: [[M4_S3_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s3
+// CHECK: [[M4_S3:%[^,]+]] = load [take] [[M4_S3_ADDR]] : $*M
+// CHECK: [[END_2:%[^,]+]] = function_ref @end_2
+// CHECK: apply [[END_2]]([[M4_S1]], [[M4_S3]])
+// CHECK-LABEL: } // end sil function 'rdar110676577'
+sil [ossa] @rdar110676577 : $@convention(thin) () -> () {
+bb0:
+ %0 = alloc_stack $M4
+ %1 = mark_must_check [consumable_and_assignable] %0 : $*M4
+ %3 = function_ref @get_M4 : $@convention(thin) () -> 
@owned M4 + %4 = apply %3() : $@convention(thin) () -> @owned M4 + store %4 to [init] %1 : $*M4 + %6 = struct_element_addr %1 : $*M4, #M4.s2 + %6a = struct_element_addr %1 : $*M4, #M4.s4 + %see_addr_2 = function_ref @see_addr_2 : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> () + apply %see_addr_2(%6, %6a) : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> () + %12 = struct_element_addr %1 : $*M4, #M4.s1 + %13 = load [copy] %12 : $*M + %14 = struct_element_addr %1 : $*M4, #M4.s3 + %15 = load [copy] %14 : $*M + %16 = function_ref @end_2 : $@convention(thin) (@owned M, @owned M) -> () + %17 = apply %16(%13, %15) : $@convention(thin) (@owned M, @owned M) -> () + destroy_addr %1 : $*M4 + dealloc_stack %0 : $*M4 + %22 = tuple () + return %22 : $() +} From 942d91ad5d80268ad2b010a13ae4795485fb0281 Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Thu, 8 Jun 2023 10:58:42 -0700 Subject: [PATCH 5/9] [MoveOnlyAddressChecker] NFC: Promoted assertion. Dumped more info and called llvm_unreachable on bad state. --- lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp index bb9ab67d4bcb2..1c82a3000bc49 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp @@ -2613,7 +2613,12 @@ void MoveOnlyAddressCheckerPImpl::rewriteUses( takeInst.second.setBits(bits); bool claimedConsume = consumes.claimConsume(takeInst.first, bits); (void)claimedConsume; - assert(claimedConsume && "Should claim all copies?!"); + if (!claimedConsume) { + llvm::errs() + << "Found consume that was not recorded as a 'claimed consume'!\n"; + llvm::errs() << "Unrecorded consume: " << *takeInst.first; + llvm_unreachable("Standard compiler abort?!"); + } } // Then rewrite all copy insts to be takes and claim them. 
From 505bd14a42893bbfae823280ea658ffc4b9a908f Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Mon, 12 Jun 2023 15:45:25 -0700 Subject: [PATCH 6/9] [MoveOnlyAddressChecker] Maximize lifetimes. Previously, the checker inserted destroys after each last use. Here, extend the lifetimes of fields as far as possible within their original (unchecked) limits. rdar://99681073 --- .../Mandatory/MoveOnlyAddressCheckerUtils.cpp | 320 +++++ .../moveonly_address_maximize.swift | 307 +++++ test/Interpreter/moveonly_maximize.swift | 33 + test/SILOptimizer/discard_checking.swift | 45 +- test/SILOptimizer/moveonly_addresschecker.sil | 46 +- .../moveonly_addresschecker_debuginfo.sil | 6 +- .../moveonly_addresschecker_maximize.sil | 1130 +++++++++++++++++ test/SILOptimizer/moveonly_lifetime.swift | 9 +- 8 files changed, 1842 insertions(+), 54 deletions(-) create mode 100644 test/Interpreter/moveonly_address_maximize.swift create mode 100644 test/Interpreter/moveonly_maximize.swift create mode 100644 test/SILOptimizer/moveonly_addresschecker_maximize.sil diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp index 1c82a3000bc49..8675e82417bee 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp @@ -642,6 +642,15 @@ namespace { struct UseState { MarkMustCheckInst *address; + /// The number of fields in the exploded type. Set in initializeLiveness. + unsigned fieldCount = UINT_MAX; + + /// The blocks that consume fields of the value. + /// + /// A map from blocks to a bit vector recording which fields were destroyed + /// in each. + llvm::SmallMapVector consumingBlocks; + /// A map from destroy_addr to the part of the type that it destroys. 
llvm::SmallMapVector destroys; @@ -750,6 +759,7 @@ struct UseState { void clear() { address = nullptr; + consumingBlocks.clear(); destroys.clear(); livenessUses.clear(); borrows.clear(); @@ -806,6 +816,14 @@ struct UseState { } } + void recordConsumingBlock(SILBasicBlock *block, TypeTreeLeafTypeRange range) { + auto iter = consumingBlocks.find(block); + if (iter == consumingBlocks.end()) { + iter = consumingBlocks.insert({block, SmallBitVector(fieldCount)}).first; + } + range.setBits(iter->second); + } + void initializeLiveness(FieldSensitiveMultiDefPrunedLiveRange &prunedLiveness); @@ -907,6 +925,8 @@ struct UseState { void UseState::initializeLiveness( FieldSensitiveMultiDefPrunedLiveRange &liveness) { + fieldCount = liveness.getNumSubElements(); + // We begin by initializing all of our init uses. for (auto initInstAndValue : initInsts) { LLVM_DEBUG(llvm::dbgs() << "Found def: " << *initInstAndValue.first); @@ -1019,6 +1039,8 @@ void UseState::initializeLiveness( if (!isReinitToInitConvertibleInst(reinitInstAndValue.first)) { liveness.updateForUse(reinitInstAndValue.first, reinitInstAndValue.second, false /*lifetime ending*/); + recordConsumingBlock(reinitInstAndValue.first->getParent(), + reinitInstAndValue.second); LLVM_DEBUG(llvm::dbgs() << "Added liveness for reinit: " << *reinitInstAndValue.first; liveness.print(llvm::dbgs())); @@ -1030,6 +1052,8 @@ void UseState::initializeLiveness( for (auto takeInstAndValue : takeInsts) { liveness.updateForUse(takeInstAndValue.first, takeInstAndValue.second, true /*lifetime ending*/); + recordConsumingBlock(takeInstAndValue.first->getParent(), + takeInstAndValue.second); LLVM_DEBUG(llvm::dbgs() << "Added liveness for take: " << *takeInstAndValue.first; liveness.print(llvm::dbgs())); @@ -1037,11 +1061,18 @@ void UseState::initializeLiveness( for (auto copyInstAndValue : copyInsts) { liveness.updateForUse(copyInstAndValue.first, copyInstAndValue.second, true /*lifetime ending*/); + 
recordConsumingBlock(copyInstAndValue.first->getParent(), + copyInstAndValue.second); LLVM_DEBUG(llvm::dbgs() << "Added liveness for copy: " << *copyInstAndValue.first; liveness.print(llvm::dbgs())); } + for (auto destroyInstAndValue : destroys) { + recordConsumingBlock(destroyInstAndValue.first->getParent(), + destroyInstAndValue.second); + } + // Do the same for our borrow and liveness insts. for (auto livenessInstAndValue : borrows) { liveness.updateForUse(livenessInstAndValue.first, @@ -1321,6 +1352,44 @@ struct MoveOnlyAddressCheckerPImpl { void handleSingleBlockDestroy(SILInstruction *destroy, bool isReinit); }; +class ExtendUnconsumedLiveness { + UseState addressUseState; + FieldSensitiveMultiDefPrunedLiveRange &liveness; + FieldSensitivePrunedLivenessBoundary &boundary; + + enum class DestroyKind { + Destroy, + Take, + Reinit, + }; + using DestroysCollection = + llvm::SmallMapVector; + using ConsumingBlocksCollection = SmallPtrSetVector; + +public: + ExtendUnconsumedLiveness(UseState addressUseState, + FieldSensitiveMultiDefPrunedLiveRange &liveness, + FieldSensitivePrunedLivenessBoundary &boundary) + : addressUseState(addressUseState), liveness(liveness), + boundary(boundary) {} + + void run(); + + void runOnField(unsigned element, DestroysCollection &destroys, + ConsumingBlocksCollection &consumingBlocks); + +private: + bool hasDefAfter(SILInstruction *inst, unsigned element); + + bool + shouldAddDestroyToLiveness(SILInstruction *destroy, unsigned element, + BasicBlockSet const &consumedAtExitBlocks, + BasicBlockSetVector const &consumedAtEntryBlocks); + + void addPreviousInstructionToLiveness(SILInstruction *inst, unsigned element, + bool lifetimeEnding); +}; + } // namespace //===----------------------------------------------------------------------===// @@ -2433,6 +2502,9 @@ void MoveOnlyAddressCheckerPImpl::insertDestroysOnBoundary( }); }; + // Control flow merge blocks used as insertion points. 
+ llvm::DenseMap mergeBlocks; + for (auto &pair : boundary.getLastUsers()) { auto *inst = pair.first; auto &bv = pair.second; @@ -2479,6 +2551,16 @@ void MoveOnlyAddressCheckerPImpl::insertDestroysOnBoundary( if (isa(inst)) { auto *block = inst->getParent(); for (auto *succBlock : block->getSuccessorBlocks()) { + auto iter = mergeBlocks.find(succBlock); + if (iter == mergeBlocks.end()) + iter = mergeBlocks.insert({succBlock, bits}).first; + else { + SmallBitVector &alreadySetBits = iter->second; + bool hadCommon = alreadySetBits.anyCommon(bits); + alreadySetBits &= bits; + if (hadCommon) + continue; + } auto *insertPt = &*succBlock->begin(); insertDestroyBeforeInstruction(addressUseState, insertPt, @@ -2771,6 +2853,240 @@ void MoveOnlyAddressCheckerPImpl::checkForReinitAfterDiscard() { } } +void ExtendUnconsumedLiveness::run() { + ConsumingBlocksCollection consumingBlocks; + DestroysCollection destroys; + for (unsigned element = 0, count = liveness.getNumSubElements(); + element < count; ++element) { + + for (auto pair : addressUseState.consumingBlocks) { + if (pair.second.test(element)) { + consumingBlocks.insert(pair.first); + } + } + + for (auto pair : addressUseState.destroys) { + if (pair.second.contains(element)) { + destroys[pair.first] = DestroyKind::Destroy; + } + } + for (auto pair : addressUseState.takeInsts) { + if (pair.second.contains(element)) { + destroys[pair.first] = DestroyKind::Take; + } + } + for (auto pair : addressUseState.reinitInsts) { + if (pair.second.contains(element)) { + destroys[pair.first] = DestroyKind::Reinit; + } + } + + runOnField(element, destroys, consumingBlocks); + + consumingBlocks.clear(); + destroys.clear(); + } +} + +/// Extend liveness of each field as far as possible within the original live +/// range as far as possible without incurring any copies. +/// +/// The strategy has two parts. +/// +/// (1) The global analysis: +/// - Collect the blocks in which the field was live before canonicalization. 
+/// These are the "original" live blocks (originalLiveBlocks). +/// [Color these blocks green.] +/// - From within that collection, collect the blocks which contain a _final_ +/// consuming, non-destroy use, and their iterative successors. +/// These are the "consumed" blocks (consumedAtExitBlocks). +/// [Color these blocks red.] +/// - Extend liveness down to the boundary between originalLiveBlocks and +/// consumedAtExitBlocks blocks. +/// [Extend liveness down to the boundary between green blocks and red.] +/// - In particular, in regions of originalLiveBlocks which have no boundary +/// with consumedAtExitBlocks, liveness should be extended to its original +/// extent. +/// [Extend liveness down to the boundary between green blocks and uncolored.] +/// +/// (2) The local analysis: +/// - For in-block lifetimes, extend liveness forward from non-consuming uses +/// and dead defs to the original destroy. +void ExtendUnconsumedLiveness::runOnField( + unsigned element, DestroysCollection &destroys, + ConsumingBlocksCollection &consumingBlocks) { + SILValue currentDef = addressUseState.address; + + // First, collect the blocks that were _originally_ live. We can't use + // liveness here because it doesn't include blocks that occur before a + // destroy_addr. + BasicBlockSet originalLiveBlocks(currentDef->getFunction()); + { + // Some of the work here was already done by initializeLiveness. + // Specifically, it already discovered all blocks containing (transitive) + // uses and blocks that appear between them and the def. + // + // Seed the set with what it already discovered. + for (auto *discoveredBlock : liveness.getDiscoveredBlocks()) + originalLiveBlocks.insert(discoveredBlock); + + // Start the walk from the consuming blocks (which includes destroys as well + // as the other consuming uses). 
+ BasicBlockWorklist worklist(currentDef->getFunction()); + for (auto *consumingBlock : consumingBlocks) { + worklist.push(consumingBlock); + } + + // Walk backwards from consuming blocks. + while (auto *block = worklist.pop()) { + if (!originalLiveBlocks.insert(block)) { + continue; + } + for (auto *predecessor : block->getPredecessorBlocks()) { + // If the block was discovered by liveness, we already added it to the + // set. + if (originalLiveBlocks.contains(predecessor)) + continue; + worklist.pushIfNotVisited(predecessor); + } + } + } + + // Second, collect the blocks which occur after a consuming use. + BasicBlockSet consumedAtExitBlocks(currentDef->getFunction()); + BasicBlockSetVector consumedAtEntryBlocks(currentDef->getFunction()); + { + // Start the forward walk from blocks which contain non-destroy consumes not + // followed by defs. + // + // Because they contain a consume not followed by a def, these are + // consumed-at-exit. + BasicBlockWorklist worklist(currentDef->getFunction()); + for (auto iterator : boundary.getLastUsers()) { + if (!iterator.second.test(element)) + continue; + auto *instruction = iterator.first; + // Skip over destroys on the boundary. + auto iter = destroys.find(instruction); + if (iter != destroys.end() && iter->second != DestroyKind::Take) { + continue; + } + // Skip over non-consuming users. + auto interestingUser = liveness.isInterestingUser(instruction, element); + assert(interestingUser != + FieldSensitivePrunedLiveness::IsInterestingUser::NonUser); + if (interestingUser != + FieldSensitivePrunedLiveness::IsInterestingUser::LifetimeEndingUse) { + continue; + } + // A consume with a subsequent def doesn't cause the block to be + // consumed-at-exit. 
+ if (hasDefAfter(instruction, element)) + continue; + worklist.push(instruction->getParent()); + } + while (auto *block = worklist.pop()) { + consumedAtExitBlocks.insert(block); + for (auto *successor : block->getSuccessorBlocks()) { + if (!originalLiveBlocks.contains(successor)) + continue; + worklist.pushIfNotVisited(successor); + consumedAtEntryBlocks.insert(successor); + } + } + } + + // Third, find the blocks on the boundary between the originally-live blocks + // and the originally-live-but-consumed blocks. Extend liveness "to the end" + // of these blocks. + for (auto *block : consumedAtEntryBlocks) { + for (auto *predecessor : block->getPredecessorBlocks()) { + if (consumedAtExitBlocks.contains(predecessor)) + continue; + // Add "the instruction(s) before the terminator" of the predecessor to + // liveness. + addPreviousInstructionToLiveness(predecessor->getTerminator(), element, + /*lifetimeEnding*/ false); + } + } + + // Finally, preserve the destroys which weren't in the consumed region in + // place: hoisting such destroys would not avoid copies. + for (auto pair : destroys) { + auto *destroy = pair.first; + if (!shouldAddDestroyToLiveness(destroy, element, consumedAtExitBlocks, + consumedAtEntryBlocks)) + continue; + addPreviousInstructionToLiveness(destroy, element, + /*lifetimeEnding*/ false); + } +} + +bool ExtendUnconsumedLiveness::shouldAddDestroyToLiveness( + SILInstruction *destroy, unsigned element, + BasicBlockSet const &consumedAtExitBlocks, + BasicBlockSetVector const &consumedAtEntryBlocks) { + auto *block = destroy->getParent(); + bool followedByDef = hasDefAfter(destroy, element); + if (!followedByDef) { + // This destroy is the last write to the field in the block. + // + // If the block is consumed-at-exit, then there is some other consuming use + // before this destroy. Liveness can't be extended. 
+ return !consumedAtExitBlocks.contains(block); + } + for (auto *inst = destroy->getPreviousInstruction(); inst; + inst = inst->getPreviousInstruction()) { + if (liveness.isDef(inst, element)) { + // Found the corresponding def with no intervening users. Liveness + // can be extended to the destroy. + return true; + } + auto interestingUser = liveness.isInterestingUser(inst, element); + switch (interestingUser) { + case FieldSensitivePrunedLiveness::IsInterestingUser::NonUser: + break; + case FieldSensitivePrunedLiveness::IsInterestingUser::NonLifetimeEndingUse: + // The first use seen is non-consuming. Liveness can be extended to the + // destroy. + return true; + break; + case FieldSensitivePrunedLiveness::IsInterestingUser::LifetimeEndingUse: + // Found a consuming use. Liveness can't be extended to the destroy + // (without creating a copy and triggering a diagnostic). + return false; + break; + } + } + // Found no uses or defs between the destroy and the top of the block. If the + // block was not consumed at entry, liveness can be extended to the destroy. + return !consumedAtEntryBlocks.contains(block); +} + +bool ExtendUnconsumedLiveness::hasDefAfter(SILInstruction *start, + unsigned element) { + // NOTE: Start iteration at \p start, not its sequel, because + // it might be both a consuming use and a def. 
+ for (auto *inst = start; inst; inst = inst->getNextInstruction()) { + if (liveness.isDef(inst, element)) + return true; + } + return false; +} + +void ExtendUnconsumedLiveness::addPreviousInstructionToLiveness( + SILInstruction *inst, unsigned element, bool lifetimeEnding) { + auto range = TypeTreeLeafTypeRange(element, element + 1); + if (auto *previous = inst->getPreviousInstruction()) { + liveness.updateForUse(previous, range, lifetimeEnding); + } else { + for (auto *predecessor : inst->getParent()->getPredecessorBlocks()) { + liveness.updateForUse(predecessor->getTerminator(), range, + lifetimeEnding); + } + } +} + bool MoveOnlyAddressCheckerPImpl::performSingleCheck( MarkMustCheckInst *markedAddress) { SWIFT_DEFER { diagnosticEmitter.clearUsesWithDiagnostic(); }; @@ -2865,6 +3181,10 @@ bool MoveOnlyAddressCheckerPImpl::performSingleCheck( FieldSensitivePrunedLivenessBoundary boundary(liveness.getNumSubElements()); liveness.computeBoundary(boundary); + ExtendUnconsumedLiveness extension(addressUseState, liveness, boundary); + extension.run(); + boundary.clear(); + liveness.computeBoundary(boundary); insertDestroysOnBoundary(markedAddress, liveness, boundary); checkForReinitAfterDiscard(); rewriteUses(markedAddress, liveness, boundary); diff --git a/test/Interpreter/moveonly_address_maximize.swift b/test/Interpreter/moveonly_address_maximize.swift new file mode 100644 index 0000000000000..8ec5548ba3738 --- /dev/null +++ b/test/Interpreter/moveonly_address_maximize.swift @@ -0,0 +1,307 @@ +// RUN: %target-run-simple-swift(-Xfrontend -sil-verify-all) | %FileCheck %s +// RUN: %target-run-simple-swift(-O -Xfrontend -sil-verify-all) | %FileCheck %s + +struct S : ~Copyable { + let s: String + init(_ s: String) { self.s = s } + deinit { + print("destroying \(s)") + } +} +struct S2 : ~Copyable { + var s1: S + var s2: S + init(_ s: String) { + self.s1 = S("\(s).s1") + self.s2 = S("\(s).s2") + } +} +struct S3 : ~Copyable { + var s1: S + var s2: S + var s3: S + init(_ s: 
String) { + self.s1 = S("\(s).s1") + self.s2 = S("\(s).s2") + self.s3 = S("\(s).s3") + } +} + +func consumeVal(_ s: consuming S) {} +func consumeVal(_ s: consuming S2) {} +func borrowVal(_ s: borrowing S) {} +func borrowVal(_ s: borrowing S2) {} + +func marker(_ s: String) { + print("\(#function): \(s)") +} + +// Simple test that makes sure that we still after we consume have the lifetime +// of s be completely consumed by consumeVal. +// CHECK: destroying simpleTestVar().first.s1 +// CHECK: destroying simpleTestVar().first.s2 +// CHECK: destroying simpleTestVar().second.s1 +// CHECK: destroying simpleTestVar().second.s2 +// CHECK: marker(_:): simpleTestVar().1 +@_silgen_name("simpleTestVar") +func simpleTestVar() { + var s = S2("\(#function).first") + s = S2("\(#function).second") + consumeVal(s) // Lifetime of s should end here before end of scope. + marker("\(#function).1") +} + +// Simple test that proves that we can maximize lifetimes in a field sensitive +// manner. Since we only consume s.s1, s.s2's lifetime should still be maximized +// and be at end of scope. +// CHECK: destroying simpleTestVar2().first.s1 +// CHECK: destroying simpleTestVar2().first.s2 +// CHECK: destroying simpleTestVar2().second.s1 +// CHECK: marker(_:): simpleTestVar2().1 +// CHECK: destroying simpleTestVar2().second.s2 +func simpleTestVar2() { + var s = S2("\(#function).first") + s = S2("\(#function).second") + consumeVal(s.s1) // Lifetime of s1 should end here. + marker("\(#function).1") + // Lifetime of s2 should end at end of scope after marker. +} + +// In this case, we consume all of s by consuming s.s1 and s.s2 separately, so +// all lifetimes should be done before marker. 
+// CHECK: destroying simpleTestVar3().first.s1 +// CHECK: destroying simpleTestVar3().first.s2 +// CHECK: destroying simpleTestVar3().second.s1 +// CHECK: destroying simpleTestVar3().second.s2 +// CHECK: marker(_:): simpleTestVar3().1 +func simpleTestVar3() { + var s = S2("\(#function).first") + s = S2("\(#function).second") + consumeVal(s.s1) + consumeVal(s.s2) + marker("\(#function).1") // Lifetimes should end before marker. +} + +// In this case, we completely consume s and then reinitialize s implying we +// need to deal with two disjoint lifetimes. The second lifetime of s should end +// after marker. +// CHECK: destroying simpleTestVar3a().first.s1 +// CHECK: destroying simpleTestVar3a().first.s2 +// CHECK: destroying simpleTestVar3a().second.s1 +// CHECK: destroying simpleTestVar3a().second.s2 +// CHECK: marker(_:): simpleTestVar3a().1 +// CHECK: marker(_:): simpleTestVar3a().2 +// CHECK: destroying simpleTestVar3a().third.s1 +// CHECK: destroying simpleTestVar3a().third.s2 +func simpleTestVar3a() { + var s = S2("\(#function).first") + s = S2("\(#function).second") + consumeVal(s.s1) + consumeVal(s.s2) + + marker("\(#function).1") + + s = S2("\(#function).third") + marker("\(#function).2") +} + +// In this case, we have another borrowVal of s.s2. That should still let s.s2's +// lifetime end after marker. +// CHECK: destroying simpleTestVar3b().first.s1 +// CHECK: destroying simpleTestVar3b().first.s2 +// CHECK: destroying simpleTestVar3b().second.s1 +// CHECK: marker(_:): simpleTestVar3b().1 +// CHECK: destroying simpleTestVar3b().second.s2 +func simpleTestVar3b() { + var s = S2("\(#function).first") + s = S2("\(#function).second") + consumeVal(s.s1) + borrowVal(s.s2) + marker("\(#function).1") // s2 should end its lifetime after marker. +} + +// In this case, we are testing reinitialization and making sure that we can +// handle two initializations properly. We also are testing conditional merge +// logic. 
Since in both cases below s is completely consumed in b, s's lifetime +// would end at marker. + +// CHECK: destroying simpleTestVar4(_:_:)[false, false)].first.s1 +// CHECK: destroying simpleTestVar4(_:_:)[false, false)].first.s2 +// CHECK: marker(_:): simpleTestVar4(_:_:)[false, false)].1 +// CHECK: destroying simpleTestVar4(_:_:)[false, false)].second.s1 +// CHECK: destroying simpleTestVar4(_:_:)[false, false)].second.s2 +// CHECK: destroying simpleTestVar4(_:_:)[false, false)].third.s1 +// CHECK: destroying simpleTestVar4(_:_:)[false, false)].third.s2 +// CHECK: marker(_:): simpleTestVar4(_:_:)[false, false)].2 + +// CHECK: destroying simpleTestVar4(_:_:)[false, true)].first.s1 +// CHECK: destroying simpleTestVar4(_:_:)[false, true)].first.s2 +// CHECK: marker(_:): simpleTestVar4(_:_:)[false, true)].1 +// CHECK: destroying simpleTestVar4(_:_:)[false, true)].second.s1 +// CHECK: destroying simpleTestVar4(_:_:)[false, true)].second.s2 +// CHECK: destroying simpleTestVar4(_:_:)[false, true)].third.s1 +// CHECK: destroying simpleTestVar4(_:_:)[false, true)].third.s2 + +// CHECK: destroying simpleTestVar4(_:_:)[true, false)].first.s1 +// CHECK: destroying simpleTestVar4(_:_:)[true, false)].first.s2 +// CHECK: destroying simpleTestVar4(_:_:)[true, false)].second.s1 +// CHECK: destroying simpleTestVar4(_:_:)[true, false)].second.s2 +// CHECK: destroying simpleTestVar4(_:_:)[true, false)].third.s1 +// CHECK: destroying simpleTestVar4(_:_:)[true, false)].third.s2 +// CHECK: marker(_:): simpleTestVar4(_:_:)[true, false)].2 + +// CHECK: destroying simpleTestVar4(_:_:)[true, true)].first.s1 +// CHECK: destroying simpleTestVar4(_:_:)[true, true)].first.s2 +// CHECK: destroying simpleTestVar4(_:_:)[true, true)].second.s1 +// CHECK: destroying simpleTestVar4(_:_:)[true, true)].second.s2 +// CHECK: destroying simpleTestVar4(_:_:)[true, true)].third.s1 +// CHECK: destroying simpleTestVar4(_:_:)[true, true)].third.s2 +func simpleTestVar4(_ b1: Bool, _ b2: Bool) { + var s = 
S2("\(#function)[\(b1), \(b2))].first") + s = S2("\(#function)[\(b1), \(b2))].second") + + if b1 { + consumeVal(s) + } else { + marker("\(#function)[\(b1), \(b2))].1") + // S's lifetime should end after marker in this block. + } + + s = S2("\(#function)[\(b1), \(b2))].third") + + if b2 { + consumeVal(s) + } else { + marker("\(#function)[\(b1), \(b2))].2") + // S's 2nd lifetime should end after marker in this block. + } +} + +// This test is similar to the previous, except we are consuming different +// values along the if/else branch that completely covers the value. As a result +// of this, we need to end the lifetime of s in the branches. +// CHECK: destroying simpleTestVar6(_:)[false].first.s1 +// CHECK: destroying simpleTestVar6(_:)[false].first.s2 +// CHECK: destroying simpleTestVar6(_:)[false].second.s2 +// CHECK: marker(_:): simpleTestVar6(_:)[false].2 +// CHECK: destroying simpleTestVar6(_:)[false].second.s1 +// CHECK: destroying simpleTestVar6(_:)[false].third.s1 +// CHECK: destroying simpleTestVar6(_:)[false].third.s2 + +// CHECK: destroying simpleTestVar6(_:)[true].first.s1 +// CHECK: destroying simpleTestVar6(_:)[true].first.s2 +// CHECK: destroying simpleTestVar6(_:)[true].second.s1 +// CHECK: marker(_:): simpleTestVar6(_:)[true].1 +// CHECK: destroying simpleTestVar6(_:)[true].second.s2 +// CHECK: destroying simpleTestVar6(_:)[true].third.s1 +// CHECK: destroying simpleTestVar6(_:)[true].third.s2 +func simpleTestVar6(_ b: Bool) { + var s = S2("\(#function)[\(b)].first") + s = S2("\(#function)[\(b)].second") + + if b { + consumeVal(s.s1) // end of s.s1's lifetime. + marker("\(#function)[\(b)].1") + // s.s2 should end here. + } else { + consumeVal(s.s2) // end of s.s2's lifetime + marker("\(#function)[\(b)].2") + // end of s.s1's lifetime should end after marker. + } + + s = S2("\(#function)[\(b)].third") +} + +// In this case, we are using S3 implying we have three fields. 
So despite the +// fact that we are deleting these two values in the if-else branches, s3's +// lifetime needs to end at end of scope. +// CHECK: destroying simpleTestVar6a(_:)[false].first.s1 +// CHECK: destroying simpleTestVar6a(_:)[false].first.s2 +// CHECK: destroying simpleTestVar6a(_:)[false].first.s3 +// CHECK: destroying simpleTestVar6a(_:)[false].second.s2 +// CHECK: marker(_:): simpleTestVar6a(_:)[false].2 +// CHECK: destroying simpleTestVar6a(_:)[false].second.s1 +// CHECK: marker(_:): simpleTestVar6a(_:)[false].3 +// CHECK: destroying simpleTestVar6a(_:)[false].second.s3 + +// CHECK: destroying simpleTestVar6a(_:)[true].first.s1 +// CHECK: destroying simpleTestVar6a(_:)[true].first.s2 +// CHECK: destroying simpleTestVar6a(_:)[true].first.s3 +// CHECK: destroying simpleTestVar6a(_:)[true].second.s1 +// CHECK: marker(_:): simpleTestVar6a(_:)[true].1 +// CHECK: destroying simpleTestVar6a(_:)[true].second.s2 +// CHECK: marker(_:): simpleTestVar6a(_:)[true].3 +// CHECK: destroying simpleTestVar6a(_:)[true].second.s3 +func simpleTestVar6a(_ b: Bool) { + var s = S3("\(#function)[\(b)].first") + s = S3("\(#function)[\(b)].second") + + if b { + consumeVal(s.s1) // end of s.s1's lifetime. + marker("\(#function)[\(b)].1") + // s.s2 should end here. + } else { + consumeVal(s.s2) // end of s.s2's lifetime + marker("\(#function)[\(b)].2") + // end of s.s1's lifetime should end after marker. + } + + marker("\(#function)[\(b)].3") + // s.s3's lifetime should end here. +} + +// In this case, we are using S3, but we are consuming two disjoint parts of S +// in the if statement so we cover again completely. 
+// CHECK: destroying simpleTestVar6b(_:)[false].first.s1 +// CHECK: destroying simpleTestVar6b(_:)[false].first.s2 +// CHECK: destroying simpleTestVar6b(_:)[false].first.s3 +// CHECK: destroying simpleTestVar6b(_:)[false].second.s2 +// CHECK: marker(_:): simpleTestVar6b(_:)[false].2 +// CHECK: destroying simpleTestVar6b(_:)[false].second.s3 +// CHECK: destroying simpleTestVar6b(_:)[false].second.s1 +// CHECK: marker(_:): simpleTestVar6b(_:)[false].3 + +// CHECK: destroying simpleTestVar6b(_:)[true].first.s1 +// CHECK: destroying simpleTestVar6b(_:)[true].first.s2 +// CHECK: destroying simpleTestVar6b(_:)[true].first.s3 +// CHECK: destroying simpleTestVar6b(_:)[true].second.s1 +// CHECK: destroying simpleTestVar6b(_:)[true].second.s3 +// CHECK: marker(_:): simpleTestVar6b(_:)[true].1 +// CHECK: destroying simpleTestVar6b(_:)[true].second.s2 +// CHECK: marker(_:): simpleTestVar6b(_:)[true].3 +func simpleTestVar6b(_ b: Bool) { + var s = S3("\(#function)[\(b)].first") + s = S3("\(#function)[\(b)].second") + + if b { + consumeVal(s.s1) // end of s.s1's lifetime. + consumeVal(s.s3) // end of s.s3's lifetime + marker("\(#function)[\(b)].1") + // s.s2 should end here. + } else { + consumeVal(s.s2) // end of s.s2's lifetime + marker("\(#function)[\(b)].2") + // end of s.s1's lifetime should end after marker. + // end of s.s3's lifetime should end after marker. 
+ } + + marker("\(#function)[\(b)].3") +} + + +simpleTestVar() +simpleTestVar2() +simpleTestVar3() +simpleTestVar3a() +simpleTestVar3b() +simpleTestVar4(false, false) +simpleTestVar4(false, true) +simpleTestVar4(true, false) +simpleTestVar4(true, true) +simpleTestVar6(false) +simpleTestVar6(true) +simpleTestVar6a(false) +simpleTestVar6a(true) +simpleTestVar6b(false) +simpleTestVar6b(true) + diff --git a/test/Interpreter/moveonly_maximize.swift b/test/Interpreter/moveonly_maximize.swift new file mode 100644 index 0000000000000..d313c5d501a19 --- /dev/null +++ b/test/Interpreter/moveonly_maximize.swift @@ -0,0 +1,33 @@ +// RUN: %target-run-simple-swift(-Xfrontend -sil-verify-all) | %FileCheck %s +// RUN: %target-run-simple-swift(-O -Xfrontend -sil-verify-all) | %FileCheck %s + +// REQUIRES: executable_test +struct Alice: ~Copyable { + var age: Int + + init(age: Int) { + print("INIT"); + self.age = age + } + + deinit { print("DEINIT") } +} + +func eatMe(_ alice: consuming Alice) { + print(" start") + print(" age:", alice.age) + print(" end") +} + +func doit() { + let alice = Alice(age: 10) + eatMe(alice) +} + +doit() + +// CHECK: INIT +// CHECK: start +// CHECK: age: 10 +// CHECK: end +// CHECK: DEINIT diff --git a/test/SILOptimizer/discard_checking.swift b/test/SILOptimizer/discard_checking.swift index 322c1eb66487c..fbbd5d180a5af 100644 --- a/test/SILOptimizer/discard_checking.swift +++ b/test/SILOptimizer/discard_checking.swift @@ -125,8 +125,8 @@ struct Basics: ~Copyable { if case .red = c { discard self // expected-note {{discarded self here}} } else { - mutator() // expected-error {{must consume 'self' before exiting method that discards self}} - throw E.someError // <- better spot + mutator() + throw E.someError // expected-error {{must consume 'self' before exiting method that discards self}} } } @@ -165,7 +165,7 @@ struct Basics: ~Copyable { } } - consuming func test8_stillMissingAConsume1(_ c: Color) throws { + consuming func test8_stillMissingAConsume1(_ c: 
Color) throws { // expected-error {{must consume 'self' before exiting method that discards self}} if case .red = c { discard self // expected-note {{discarded self here}} return @@ -174,7 +174,7 @@ struct Basics: ~Copyable { _ = consume self fatalError("hi") } - } // expected-error {{must consume 'self' before exiting method that discards self}} + } consuming func test8_stillMissingAConsume2(_ c: Color) throws { if case .red = c { @@ -212,10 +212,10 @@ struct Basics: ~Copyable { return } catch { print("hi") - return // <- better spot!! + return // expected-error {{must consume 'self' before exiting method that discards self}} } _ = consume self // expected-warning {{will never be executed}} - } // expected-error {{must consume 'self' before exiting method that discards self}} + } consuming func test9_fixed(_ c: Color) throws { if case .red = c { @@ -238,20 +238,20 @@ struct Basics: ~Copyable { consuming func test10(_ c: Color) throws { if case .red = c { - discard self // expected-note {{discarded self here}} + discard self // expected-note 2{{discarded self here}} return } do { - throw E.someError // expected-error {{must consume 'self' before exiting method that discards self}} + throw E.someError } catch E.someError { - return // <- better spot + return // expected-error {{must consume 'self' before exiting method that discards self}} } catch { - return // <- ok spot + return // expected-error {{must consume 'self' before exiting method that discards self}} } } - consuming func test11(_ c: Color) { + consuming func test11(_ c: Color) { // expected-error {{must consume 'self' before exiting method that discards self}} guard case .red = c else { discard self // expected-note {{discarded self here}} return @@ -263,7 +263,7 @@ struct Basics: ~Copyable { borrower() let x = self self = x - mutator() // expected-error {{must consume 'self' before exiting method that discards self}} + mutator() } consuming func test11_fixed(_ c: Color) { @@ -334,7 +334,7 @@ struct 
Basics: ~Copyable { return } await asyncer() - } // <- better spot + } consuming func test13_fixed(_ c: Color) async { guard case .red = c else { @@ -345,16 +345,16 @@ struct Basics: ~Copyable { _ = consume self } - consuming func test14(_ c: Color) async { + consuming func test14(_ c: Color) async { // expected-error {{must consume 'self' before exiting method that discards self}} guard case .red = c else { discard self // expected-note {{discarded self here}} return } - await withCheckedContinuation { cont in // expected-error {{must consume 'self' before exiting method that discards self}} + await withCheckedContinuation { cont in cont.resume() } print("back!") - } // <- better spot + } consuming func test14_fixed(_ c: Color) async { guard case .red = c else { @@ -402,7 +402,7 @@ struct Basics: ~Copyable { case 0: fallthrough case 1: - throw E.someError // expected-error 2{{must consume 'self' before exiting method that discards self}} + throw E.someError // expected-error {{must consume 'self' before exiting method that discards self}} case 2: return // expected-error {{must consume 'self' before exiting method that discards self}} case 3: @@ -410,22 +410,21 @@ struct Basics: ~Copyable { case 4: globalConsumingFn(self) default: - discard self // expected-note 4{{discarded self here}} + discard self // expected-note 3{{discarded self here}} } } consuming func loopyExit_bad(_ i: Int) { if i < 0 { - discard self // expected-note 2{{discarded self here}} + discard self // expected-note {{discarded self here}} return } - // TODO: rdar://110239087 (avoid duplicate consume-before-exit diagnostics for loop in discarding method) - for _ in 0.. 
() { // CHECK: [[STACK:%.*]] = alloc_stack [lexical] $Klass, var, name "x2" // CHECK: store {{%.*}} to [init] [[STACK]] -// CHECK: destroy_addr [[STACK]] // CHECK: [[ACCESS:%.*]] = begin_access [modify] [static] [[STACK]] +// CHECK: destroy_addr [[STACK]] // CHECK: store {{%.*}} to [init] [[ACCESS]] // CHECK: end_access [[ACCESS]] // CHECK: [[ACCESS:%.*]] = begin_access [read] [static] [[STACK]] @@ -659,16 +659,16 @@ sil @see_addr_2 : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> () // CHECK: [[M4_S4_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4 // CHECK: [[SEE_ADDR_2:%[^,]+]] = function_ref @see_addr_2 // CHECK: apply [[SEE_ADDR_2]]([[M4_S2_ADDR]], [[M4_S4_ADDR]]) -// HECK: [[M4_S4_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4 -// HECK: destroy_addr [[M4_S4_ADDR_2]] -// CHECK: [[M4_S2_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s2 -// CHECK: destroy_addr [[M4_S2_ADDR_2]] // CHECK: [[M4_S1_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s1 // CHECK: [[M4_S1:%[^,]+]] = load [take] [[M4_S1_ADDR]] : $*M // CHECK: [[M4_S3_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s3 // CHECK: [[M4_S3:%[^,]+]] = load [take] [[M4_S3_ADDR]] : $*M // CHECK: [[END_2:%[^,]+]] = function_ref @end_2 // CHECK: apply [[END_2]]([[M4_S1]], [[M4_S3]]) +// CHECK: [[M4_S4_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4 +// CHECK: destroy_addr [[M4_S4_ADDR_2]] +// CHECK: [[M4_S2_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s2 +// CHECK: destroy_addr [[M4_S2_ADDR_2]] // CHECK-LABEL: } // end sil function 'rdar110676577' sil [ossa] @rdar110676577 : $@convention(thin) () -> () { bb0: diff --git a/test/SILOptimizer/moveonly_addresschecker_debuginfo.sil b/test/SILOptimizer/moveonly_addresschecker_debuginfo.sil index 65b8c312f5a80..13a8ce0f0126f 100644 --- a/test/SILOptimizer/moveonly_addresschecker_debuginfo.sil +++ b/test/SILOptimizer/moveonly_addresschecker_debuginfo.sil @@ -50,13 +50,13 
@@ bb1: // CHECK: [[BORROW:%.*]] = load_borrow [[STACK]] // CHECK: apply {{%.*}}([[BORROW]]) // CHECK-NEXT: end_borrow [[BORROW]] -// CHECK-NEXT: destroy_addr [[STACK]] -// CHECK-NEXT: debug_value undef : $*NonTrivialStruct, let, name "v" // {{.*}}; line:[[DEBUG_LOC]] // CHECK: br bb3 // // CHECK: bb2: -// CHECK-NEXT: destroy_addr [[STACK]] +// CHECK-NEXT: br bb3 +// CHECK: bb3: // CHECK-NEXT: debug_value undef : $*NonTrivialStruct, let, name "v" // {{.*}}; line:[[DEBUG_LOC]] +// CHECK-NEXT: destroy_addr [[STACK]] // CHECK: } // end sil function 'non_lifetime_ending_use_test_boundary_edge' sil [ossa] @non_lifetime_ending_use_test_boundary_edge : $@convention(thin) () -> () { %f = function_ref @get_nontrivial_struct : $@convention(thin) () -> @owned NonTrivialStruct diff --git a/test/SILOptimizer/moveonly_addresschecker_maximize.sil b/test/SILOptimizer/moveonly_addresschecker_maximize.sil new file mode 100644 index 0000000000000..e47ba09276d66 --- /dev/null +++ b/test/SILOptimizer/moveonly_addresschecker_maximize.sil @@ -0,0 +1,1130 @@ +// RUN: %target-sil-opt -sil-move-only-address-checker -enable-sil-verify-all %s | %FileCheck %s +sil_stage raw + +import Builtin + +// ############################################################################# +// ############################################################################# +// SCALAR STORAGE {{ +// ############################################################################# +// ############################################################################# + +sil [ossa] @condition : $@convention(thin) () -> Builtin.Int1 + +@_moveOnly struct S { + deinit +} + +sil [ossa] @get : $@convention(thin) () -> @out S +sil [ossa] @get_value : $@convention(thin) () -> @owned S + +sil [ossa] @see : $@convention(thin) (@guaranteed S) -> () +sil [ossa] @see_addr : $@convention(thin) (@in_guaranteed S) -> () + +sil [ossa] @end : $@convention(thin) (@owned S) -> () +sil [ossa] @end_addr : $@convention(thin) (@in S) -> () + +sil 
[ossa] @other : $@convention(thin) () -> () + +// ============================================================================= +// ============================================================================= +// Single def {{ +// ============================================================================= +// ============================================================================= + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Single block {{ +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// A load-copy of the value is passed to a value-consuming function, and the +// storage is destroyed at function end. +// +// Ensure that there is no copy and the lifetime ends at the value-consume. +// CHECK-LABEL: sil [ossa] @singleblock_consume_value_before_other : {{.*}} { +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[GET_VALUE:%[^,]+]] = function_ref @get_value +// CHECK: [[END:%[^,]+]] = function_ref @end +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: [[INSTANCE:%[^,]+]] = apply [[GET_VALUE]]() +// CHECK: store [[INSTANCE]] to [init] [[STACK]] +// CHECK: [[ACCESS:%[^,]+]] = begin_access [modify] [static] [[STACK]] +// CHECK: [[INSTANCE_RELOAD:%[^,]+]] = load [take] [[ACCESS]] +// CHECK: apply [[END]]([[INSTANCE_RELOAD]]) +// CHECK: end_access [[ACCESS]] +// CHECK: apply [[OTHER]]() +// CHECK: dealloc_stack [[STACK]] +// CHECK-LABEL: } // end sil function 'singleblock_consume_value_before_other' +sil [ossa] @singleblock_consume_value_before_other : $@convention(thin) () -> () { +bb0: + %other = function_ref @other : $@convention(thin) () -> () + %get_value = function_ref @get_value : $@convention(thin) () -> @owned S + %end = function_ref @end : $@convention(thin) (@owned S) -> () + + %stack_addr = alloc_stack $S + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S + %instance = apply %get_value() : $@convention(thin) () -> @owned S + store 
%instance to [init] %stack : $*S + %access = begin_access [deinit] [static] %stack : $*S + %instance_reload = load [copy] %access : $*S + apply %end(%instance_reload) : $@convention(thin) (@owned S) -> () + end_access %access : $*S + %18 = apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S + dealloc_stack %stack_addr : $*S + %retval = tuple () + return %retval : $() +} + +// A load-copy of the value is passed to a value-borrowing function then +// destroyed, and the storage is destroyed at function end. +// +// Ensure that there is no copy and the lifetime ends at function end. +// CHECK-LABEL: sil [ossa] @singleblock_borrow_before_other : {{.*}} { +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[GET_VALUE:%[^,]+]] = function_ref @get_value +// CHECK: [[SEE:%[^,]+]] = function_ref @see +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: [[INSTANCE:%[^,]+]] = apply [[GET_VALUE]]() +// CHECK: store [[INSTANCE]] to [init] [[STACK]] +// CHECK: [[ACCESS:%[^,]+]] = begin_access [deinit] [static] [[STACK]] +// CHECK: [[BORROW:%[^,]+]] = load_borrow [[ACCESS]] +// CHECK: apply [[SEE]]([[BORROW]]) +// CHECK: end_borrow [[BORROW]] +// CHECK: end_access [[ACCESS]] +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] +// CHECK: dealloc_stack [[STACK]] +// CHECK-LABEL: } // end sil function 'singleblock_borrow_before_other' +sil [ossa] @singleblock_borrow_before_other : $@convention(thin) () -> () { +bb0: + %other = function_ref @other : $@convention(thin) () -> () + %get_value = function_ref @get_value : $@convention(thin) () -> @owned S + %see = function_ref @see : $@convention(thin) (@guaranteed S) -> () + %end = function_ref @end : $@convention(thin) (@owned S) -> () + + %stack_addr = alloc_stack $S + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S + %instance = apply %get_value() : $@convention(thin) () -> @owned S + store %instance to [init] %stack : $*S + %access = begin_access [deinit] [static] %stack 
: $*S + %instance_reload = load [copy] %access : $*S + apply %see(%instance_reload) : $@convention(thin) (@guaranteed S) -> () + destroy_value %instance_reload : $S + end_access %access : $*S + %18 = apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S + dealloc_stack %stack_addr : $*S + %retval = tuple () + return %retval : $() +} + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// }} Single block +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Multiple blocks {{ +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// There is a consuming use in right. So bottom is a consumed block. Liveness +// is retracted up to the consume in right and up to the bottom of left. +// CHECK-LABEL: sil [ossa] @diamond__consume_r__use_l__destroy_b : {{.*}} { +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: [[GET:%[^,]+]] = function_ref @get +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[SEE_ADDR:%[^,]+]] = function_ref @see_addr +// CHECK: [[END:%[^,]+]] = function_ref @end +// CHECK: apply [[GET]]([[STACK]]) +// CHECK: cond_br undef, [[LEFT:bb[0-9]+]], [[RIGHT:bb[0-9]+]] +// CHECK: [[LEFT:bb[0-9]+]]: +// CHECK: apply [[SEE_ADDR]]([[STACK]]) +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] +// CHECK: br [[BOTTOM:bb[0-9]+]] +// CHECK: [[RIGHT:bb[0-9]+]]: +// CHECK: [[INSTANCE:%[^,]+]] = load [take] [[STACK]] +// CHECK: apply [[END]]([[INSTANCE]]) +// CHECK: br [[BOTTOM]] +// CHECK: [[BOTTOM]]: +// CHECK-LABEL: } // end sil function 'diamond__consume_r__use_l__destroy_b' +sil [ossa] @diamond__consume_r__use_l__destroy_b : $@convention(thin) () -> () { +top: + %stack_addr = alloc_stack $S + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S + %get = function_ref @get : $@convention(thin) () -> @out S + %other = function_ref 
@other : $@convention(thin) () -> () + %see = function_ref @see : $@convention(thin) (@guaranteed S) -> () + %see_addr = function_ref @see_addr : $@convention(thin) (@in_guaranteed S) -> () + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %end_addr = function_ref @end_addr : $@convention(thin) (@in S) -> () + apply %get(%stack) : $@convention(thin) () -> @out S + cond_br undef, left, right +left: + apply %see_addr(%stack) : $@convention(thin) (@in_guaranteed S) -> () + apply %other() : $@convention(thin) () -> () + br bottom +right: + %copy = load [copy] %stack : $*S + apply %end(%copy) : $@convention(thin) (@owned S) -> () + br bottom +bottom: + destroy_addr %stack : $*S + dealloc_stack %stack_addr : $*S + %retval = tuple () + return %retval : $() +} + +// Only bottom is consumedAtEntry. +// CHECK-LABEL: sil [ossa] @diamond_2r__consume_r1r2__use_l__destroy_b : {{.*}} { +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: [[GET:%[^,]+]] = function_ref @get +// CHECK: [[SEE:%[^,]+]] = function_ref @see +// CHECK: [[END:%[^,]+]] = function_ref @end +// CHECK: apply [[GET]]([[STACK]]) +// CHECK: cond_br undef, [[LEFT:bb[0-9]+]], [[RIGHT:bb[0-9]+]] +// CHECK: [[LEFT]]: +// CHECK: [[BORROW:%[^,]+]] = load_borrow [[STACK]] +// CHECK: apply [[SEE]]([[BORROW]]) +// CHECK: end_borrow [[BORROW]] +// CHECK: destroy_addr [[STACK]] +// CHECK: br [[BOTTOM:bb[0-9]+]] +// CHECK: [[RIGHT]]: +// CHECK: [[BORROW_2:%[^,]+]] = load_borrow [[STACK]] +// CHECK: apply [[SEE]]([[BORROW_2]]) +// CHECK: end_borrow [[BORROW_2]] +// CHECK: br [[RIGHT_2:bb[0-9]+]] +// CHECK: [[RIGHT_2]]: +// CHECK: [[TAKE:%[^,]+]] = load [take] [[STACK]] +// CHECK: apply [[END]]([[TAKE]]) +// CHECK: br [[BOTTOM]] +// CHECK: [[BOTTOM]]: +// CHECK-LABEL: } // end sil function 'diamond_2r__consume_r1r2__use_l__destroy_b' +sil [ossa] @diamond_2r__consume_r1r2__use_l__destroy_b : $@convention(thin) () -> () { +top: + %stack_addr = alloc_stack $S + %stack = mark_must_check 
[consumable_and_assignable] %stack_addr : $*S + %get = function_ref @get : $@convention(thin) () -> @out S + %other = function_ref @other : $@convention(thin) () -> () + %see = function_ref @see : $@convention(thin) (@guaranteed S) -> () + %see_addr = function_ref @see_addr : $@convention(thin) (@in_guaranteed S) -> () + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %end_addr = function_ref @end_addr : $@convention(thin) (@in S) -> () + apply %get(%stack) : $@convention(thin) () -> @out S + cond_br undef, left, right +left: + %copy3 = load [copy] %stack : $*S + apply %see(%copy3) : $@convention(thin) (@guaranteed S) -> () + destroy_value %copy3 : $S + br bottom +right: + %copy = load [copy] %stack : $*S + apply %see(%copy) : $@convention(thin) (@guaranteed S) -> () + destroy_value %copy : $S + br right2 +right2: + %copy2 = load [copy] %stack : $*S + apply %end(%copy2) : $@convention(thin) (@owned S) -> () + br bottom +bottom: + destroy_addr %stack : $*S + dealloc_stack %stack_addr : $*S + %retval = tuple () + return %retval : $() +} + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// }} Multiple blocks +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// ============================================================================= +// ============================================================================= +// }} Single def +// ============================================================================= +// ============================================================================= + +// ============================================================================= +// ============================================================================= +// Multiple defs {{ +// ============================================================================= +// ============================================================================= + +// 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Multiple blocks {{ +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// Like diamond__consume_r__use_l__destroy_b but with a copy_addr [take] reinit +// in left. The reinit is after an apply of other. +// +// Ensure that the destroy from that deinit remains at the same location--it +// will be split into a separate instruction. +// +// TODO: Avoid the unnecessary churn of that splitting. +// CHECK-LABEL: sil [ossa] @diamond__consume_r__reinit_l__use_l__destroy_b : {{.*}} { +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: [[GET:%[^,]+]] = function_ref @get +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[SEE_ADDR:%[^,]+]] = function_ref @see_addr +// CHECK: [[END:%[^,]+]] = function_ref @end +// CHECK: apply [[GET]]([[STACK]]) +// CHECK: cond_br undef, [[LEFT:bb[0-9]+]], [[RIGHT:bb[0-9]+]] +// CHECK: [[LEFT:bb[0-9]+]]: +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] +// CHECK: apply [[SEE_ADDR]]([[STACK]]) +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] +// CHECK: br [[BOTTOM:bb[0-9]+]] +// CHECK: [[RIGHT:bb[0-9]+]]: +// CHECK: [[INSTANCE:%[^,]+]] = load [take] [[STACK]] +// CHECK: apply [[END]]([[INSTANCE]]) +// CHECK: br [[BOTTOM]] +// CHECK: [[BOTTOM]]: +// CHECK-LABEL: } // end sil function 'diamond__consume_r__reinit_l__use_l__destroy_b' +sil [ossa] @diamond__consume_r__reinit_l__use_l__destroy_b : $@convention(thin) () -> () { +top: + %stack_addr = alloc_stack $S + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S + %get = function_ref @get : $@convention(thin) () -> @out S + %other = function_ref @other : $@convention(thin) () -> () + %see = function_ref @see : $@convention(thin) (@guaranteed S) -> () + %see_addr = function_ref @see_addr : $@convention(thin) (@in_guaranteed S) -> () + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %end_addr = function_ref 
@end_addr : $@convention(thin) (@in S) -> () + apply %get(%stack) : $@convention(thin) () -> @out S + cond_br undef, left, right +left: + apply %other() : $@convention(thin) () -> () + %stack2 = alloc_stack $S + apply %get(%stack2) : $@convention(thin) () -> @out S + copy_addr [take] %stack2 to %stack : $*S + dealloc_stack %stack2 : $*S + apply %see_addr(%stack) : $@convention(thin) (@in_guaranteed S) -> () + apply %other() : $@convention(thin) () -> () + br bottom +right: + %copy = load [copy] %stack : $*S + apply %end(%copy) : $@convention(thin) (@owned S) -> () + br bottom +bottom: + destroy_addr %stack : $*S + dealloc_stack %stack_addr : $*S + %retval = tuple () + return %retval : $() +} + +// Like diamond__consume_r__reinit_l__use_l__destroy_b with a store [assign] +// reinit in left. The reinit is after an apply of other. +// +// Ensure that the destroy from that deinit remains at the same location--it +// will be split into a separate instruction. +// +// TODO: Avoid the unnecessary churn of that splitting. 
+// CHECK-LABEL: sil [ossa] @diamond__consume_r__reinit_l__use_l__destroy_b2 : {{.*}} { +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: [[GET:%[^,]+]] = function_ref @get +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[SEE_ADDR:%[^,]+]] = function_ref @see_addr +// CHECK: [[END:%[^,]+]] = function_ref @end +// CHECK: apply [[GET]]([[STACK]]) +// CHECK: cond_br undef, [[LEFT:bb[0-9]+]], [[RIGHT:bb[0-9]+]] +// CHECK: [[LEFT:bb[0-9]+]]: +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] +// CHECK: apply [[SEE_ADDR]]([[STACK]]) +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] +// CHECK: br [[BOTTOM:bb[0-9]+]] +// CHECK: [[RIGHT:bb[0-9]+]]: +// CHECK: [[INSTANCE:%[^,]+]] = load [take] [[STACK]] +// CHECK: apply [[END]]([[INSTANCE]]) +// CHECK: br [[BOTTOM]] +// CHECK: [[BOTTOM]]: +// CHECK-LABEL: } // end sil function 'diamond__consume_r__reinit_l__use_l__destroy_b2' +sil [ossa] @diamond__consume_r__reinit_l__use_l__destroy_b2 : $@convention(thin) () -> () { +top: + %stack_addr = alloc_stack $S + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S + %get = function_ref @get : $@convention(thin) () -> @out S + %get_value = function_ref @get_value : $@convention(thin) () -> @owned S + %other = function_ref @other : $@convention(thin) () -> () + %see = function_ref @see : $@convention(thin) (@guaranteed S) -> () + %see_addr = function_ref @see_addr : $@convention(thin) (@in_guaranteed S) -> () + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %end_addr = function_ref @end_addr : $@convention(thin) (@in S) -> () + apply %get(%stack) : $@convention(thin) () -> @out S + cond_br undef, left, right +left: + apply %other() : $@convention(thin) () -> () + %new = apply %get_value() : $@convention(thin) () -> @owned S + store %new to [assign] %stack : $*S + apply %see_addr(%stack) : $@convention(thin) (@in_guaranteed S) -> () + apply %other() : $@convention(thin) () -> () + br bottom +right: + %copy = 
load [copy] %stack : $*S + apply %end(%copy) : $@convention(thin) (@owned S) -> () + br bottom +bottom: + destroy_addr %stack : $*S + dealloc_stack %stack_addr : $*S + %retval = tuple () + return %retval : $() +} + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// }} Multiple blocks +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// ============================================================================= +// ============================================================================= +// }} Multiple defs +// ============================================================================= +// ============================================================================= + +// ############################################################################# +// ############################################################################# +// }} SCALAR STORAGE +// ############################################################################# +// ############################################################################# + +// ############################################################################# +// ############################################################################# +// AGGREGATE STORAGE {{ +// ############################################################################# +// ############################################################################# + +@_moveOnly struct S2 { + var s1: S + var s2: S +} + +sil [ossa] @get_value_S2 : $@convention(thin) () -> @owned S2 +sil [ossa] @end_S2 : $@convention(thin) (@owned S2) -> () + +@_moveOnly struct S3 { + var s1: S + var s2: S + var s3: S +} + +sil [ossa] @get_value_S3 : $@convention(thin) () -> @owned S3 +sil [ossa] @end_S3 : $@convention(thin) (@owned S3) -> () + +// ============================================================================= +// ============================================================================= +// 
Single def {{ +// ============================================================================= +// ============================================================================= + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Single block {{ +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// The apply of other remains before the destroy_addr. +// CHECK-LABEL: sil [ossa] @aggregate_1 : {{.*}} { +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] +// CHECK-LABEL: } // end sil function 'aggregate_1' +sil [ossa] @aggregate_1 : $@convention(thin) () -> () { +bb0: + %get_value_S2 = function_ref @get_value_S2 : $@convention(thin) () -> @owned S2 + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S2 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S2 + %instance_1 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_1 to [init] %stack : $*S2 + apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S2 + dealloc_stack %stack_addr : $*S2 + %retval = tuple () + return %retval : $() +} + + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// }} Single block +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// ============================================================================= +// ============================================================================= +// }} Single def +// ============================================================================= +// ============================================================================= + +// ============================================================================= +// 
============================================================================= +// Multiple defs {{ +// ============================================================================= +// ============================================================================= + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Single block {{ +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// Calls to other before each destroy remain before. +// CHECK-LABEL: sil [ossa] @aggregate_2 : {{.*}} { +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] : $*S2 +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] : $*S2 +// CHECK-LABEL: } // end sil function 'aggregate_2' +sil [ossa] @aggregate_2 : $@convention(thin) () -> () { +bb0: + %get_value_S2 = function_ref @get_value_S2 : $@convention(thin) () -> @owned S2 + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S2 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S2 + %instance_1 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_1 to [init] %stack : $*S2 + %instance_2 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + apply %other() : $@convention(thin) () -> () + store %instance_2 to [assign] %stack : $*S2 + apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S2 + dealloc_stack %stack_addr : $*S2 + %retval = tuple () + return %retval : $() +} + +// s1 is consumed but s2 is not and so is not destroyed until function end, after +// other. 
+ +// CHECK-LABEL: sil [ossa] @simpleTestVar2 : {{.*}} { +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: apply [[OTHER]]() +// CHECK: [[S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*S2, #S2.s2 +// CHECK: destroy_addr [[S2_ADDR]] +// CHECK-LABEL: } // end sil function 'simpleTestVar2' +sil [ossa] @simpleTestVar2 : $@convention(thin) () -> () { +bb0: + %get_value_S2 = function_ref @get_value_S2 : $@convention(thin) () -> @owned S2 + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S2 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S2 + %instance_1 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_1 to [init] %stack : $*S2 + %instance_2 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + %access = begin_access [modify] [static] %stack : $*S2 + store %instance_2 to [assign] %access : $*S2 + end_access %access : $*S2 + %access_2 = begin_access [deinit] [static] %stack : $*S2 + %s1_addr = struct_element_addr %access_2 : $*S2, #S2.s1 + %s1 = load [copy] %s1_addr : $*S + apply %end(%s1) : $@convention(thin) (@owned S) -> () + end_access %access_2 : $*S2 + apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S2 + dealloc_stack %stack_addr : $*S2 + %retval = tuple () + return %retval : $() +} + +// Both fields (#S2.s1, #S2.s2) are consumed before other. 
+// CHECK-LABEL: sil [ossa] @simpleTestVar3 : {{.*}} { +// CHECK: [[END:%[^,]+]] = function_ref @end +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: begin_access +// CHECK: [[ACCESS_1:%[^,]+]] = begin_access [modify] [static] [[STACK]] +// CHECK: [[S1_ADDR:%[^,]+]] = struct_element_addr [[ACCESS_1]] : $*S2, #S2.s1 +// CHECK: [[S1:%[^,]+]] = load [take] [[S1_ADDR]] +// CHECK: apply [[END]]([[S1]]) +// CHECK: [[ACCESS_2:%[^,]+]] = begin_access [modify] [static] [[STACK]] +// CHECK: [[S2_ADDR:%[^,]+]] = struct_element_addr [[ACCESS_2]] : $*S2, #S2.s2 +// CHECK: [[S2:%[^,]+]] = load [take] [[S2_ADDR]] +// CHECK: apply [[END]]([[S2]]) +// CHECK: apply [[OTHER]]() +// CHECK-LABEL: } // end sil function 'simpleTestVar3' +sil [ossa] @simpleTestVar3 : $@convention(thin) () -> () { +bb0: + %get_value_S2 = function_ref @get_value_S2 : $@convention(thin) () -> @owned S2 + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S2 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S2 + %instance_1 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_1 to [init] %stack : $*S2 + %instance_2 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + %access = begin_access [modify] [static] %stack : $*S2 + store %instance_2 to [assign] %access : $*S2 + end_access %access : $*S2 + %access_2 = begin_access [deinit] [static] %stack : $*S2 + %s1_addr = struct_element_addr %access_2 : $*S2, #S2.s1 + %s1 = load [copy] %s1_addr : $*S + apply %end(%s1) : $@convention(thin) (@owned S) -> () + end_access %access_2 : $*S2 + %access_3 = begin_access [deinit] [static] %stack : $*S2 + %s2_addr = struct_element_addr %access_3 : $*S2, #S2.s2 + %s2 = load [copy] %s2_addr : $*S + apply %end(%s2) : $@convention(thin) (@owned S) -> () + end_access %access_3 : $*S2 + apply %other() : $@convention(thin) 
() -> () + destroy_addr %stack : $*S2 + dealloc_stack %stack_addr : $*S2 + %retval = tuple () + return %retval : $() +} + +// Addr is initialized, then both fields are individually consumed before other. +// Then addr is initialized again and neither field is consumed; then other is +// applied again. +// +// Ensure that the reinitialized fields survive to function end. +// +// CHECK-LABEL: sil [ossa] @simpleTestVar3a : {{.*}} { +// CHECK: [[GET_VALUE_S2:%[^,]+]] = function_ref @get_value_S2 +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: apply [[OTHER]]() +// CHECK: [[INSTANCE_3:%[^,]+]] = apply [[GET_VALUE_S2]]() +// CHECK: [[ACCESS:%[^,]+]] = begin_access [modify] [static] [[STACK]] +// CHECK: store [[INSTANCE_3]] to [init] [[ACCESS]] +// CHECK: end_access [[ACCESS]] +// CHECK: apply [[OTHER]]() : $@convention(thin) () -> () +// CHECK: destroy_addr [[STACK]] +// CHECK-LABEL: } // end sil function 'simpleTestVar3a' +sil [ossa] @simpleTestVar3a : $@convention(thin) () -> () { +bb0: + %get_value_S2 = function_ref @get_value_S2 : $@convention(thin) () -> @owned S2 + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S2 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S2 + %instance_1 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_1 to [init] %stack : $*S2 + %instance_2 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + %access_1 = begin_access [modify] [static] %stack : $*S2 + store %instance_2 to [assign] %access_1 : $*S2 + end_access %access_1 : $*S2 + %access_2 = begin_access [deinit] [static] %stack : $*S2 + %s1_addr = struct_element_addr %access_2 : $*S2, #S2.s1 + %s1 = load [copy] %s1_addr : $*S + apply %end(%s1) : $@convention(thin) (@owned S) -> () + end_access %access_2 : $*S2 + %access_3 = begin_access [deinit] [static] %stack : $*S2 + 
%s2_addr = struct_element_addr %access_3 : $*S2, #S2.s2 + %s2 = load [copy] %s2_addr : $*S + apply %end(%s2) : $@convention(thin) (@owned S) -> () + end_access %access_3 : $*S2 + apply %other() : $@convention(thin) () -> () + %instance_3 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + %access_4 = begin_access [modify] [static] %stack : $*S2 + store %instance_3 to [assign] %access_4 : $*S2 + end_access %access_4 : $*S2 + apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S2 + dealloc_stack %stack_addr : $*S2 + %retval = tuple () + return %retval : $() +} + +// Like simpleTestVar3a but without access scopes and the original init. +// CHECK-LABEL: sil [ossa] @simpleTestVar3a_simplified : {{.*}} { +// CHECK: [[GET_VALUE_S2:%[^,]+]] = function_ref @get_value_S2 +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: apply [[OTHER]]() +// CHECK: [[INSTANCE_2:%[^,]+]] = apply [[GET_VALUE_S2]]() +// CHECK: store [[INSTANCE_2]] to [init] [[STACK]] +// CHECK: apply [[OTHER]]() : $@convention(thin) () -> () +// CHECK: destroy_addr [[STACK]] +// CHECK-LABEL: } // end sil function 'simpleTestVar3a_simplified' +sil [ossa] @simpleTestVar3a_simplified : $@convention(thin) () -> () { +bb0: + %get_value_S2 = function_ref @get_value_S2 : $@convention(thin) () -> @owned S2 + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S2 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S2 + %instance_1 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_1 to [init] %stack : $*S2 + %s1_addr = struct_element_addr %stack : $*S2, #S2.s1 + %s1 = load [copy] %s1_addr : $*S + apply %end(%s1) : $@convention(thin) (@owned S) -> () + %s2_addr = struct_element_addr %stack : $*S2, #S2.s2 + %s2 = load [copy] %s2_addr : $*S + apply %end(%s2) : $@convention(thin) (@owned S) -> 
() + apply %other() : $@convention(thin) () -> () + %instance_2 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_2 to [assign] %stack : $*S2 + apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S2 + dealloc_stack %stack_addr : $*S2 + %retval = tuple () + return %retval : $() +} + +// Ensure that first initialized liveness remains until original destroy. +// And ensure that reinitialized fields live until function exit. +// CHECK-LABEL: sil [ossa] @simpleTestVar3a_simplified_vary2 : {{.*}} { +// CHECK: [[GET_VALUE:%[^,]+]] = function_ref @get_value +// CHECK: [[SEE:%[^,]+]] = function_ref @see +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack $S +// CHECK: [[INSTANCE_1:%[^,]+]] = apply [[GET_VALUE]]() +// CHECK: store [[INSTANCE_1]] to [init] [[STACK]] +// CHECK: [[BORROW:%[^,]+]] = load_borrow [[STACK]] +// CHECK: apply [[SEE]]([[BORROW]]) +// CHECK: end_borrow [[BORROW]] +// CHECK: apply [[OTHER]]() +// CHECK: [[INSTANCE_2:%[^,]+]] = apply [[GET_VALUE]]() +// CHECK: destroy_addr [[STACK]] +// CHECK: store [[INSTANCE_2]] to [init] [[STACK]] +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] +// CHECK-LABEL: } // end sil function 'simpleTestVar3a_simplified_vary2' +sil [ossa] @simpleTestVar3a_simplified_vary2 : $@convention(thin) () -> () { +bb0: + %get_value = function_ref @get_value : $@convention(thin) () -> @owned S + %see = function_ref @see : $@convention(thin) (@guaranteed S) -> () + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S + %instance_1 = apply %get_value() : $@convention(thin) () -> @owned S + store %instance_1 to [init] %stack : $*S + %s = load_borrow %stack : $*S + apply %see(%s) : $@convention(thin) (@guaranteed S) -> () + end_borrow %s : $S + apply %other() : $@convention(thin) () -> () + %instance_2 = apply %get_value() : 
$@convention(thin) () -> @owned S + store %instance_2 to [assign] %stack : $*S + apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S + dealloc_stack %stack_addr : $*S + %retval = tuple () + return %retval : $() +} + +// s1 is consumed at @end. s2 is never consumed so it's destroyed at function +// end. +// CHECK-LABEL: sil [ossa] @simpleTestVar3b : {{.*}} { +// CHECK: [[END:%[^,]+]] = function_ref @end +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: begin_access +// CHECK: [[ACCESS:%[^,]+]] = begin_access [modify] [static] [[STACK]] +// CHECK: [[S1_ADDR:%[^,]+]] = struct_element_addr [[ACCESS]] : $*S2, #S2.s1 +// CHECK: [[S1:%[^,]+]] = load [take] [[S1_ADDR]] +// CHECK: apply [[END]]([[S1]]) +// CHECK: apply [[OTHER]]() +// CHECK: [[S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*S2, #S2.s2 +// CHECK: destroy_addr [[S2_ADDR]] +// CHECK-LABEL: } // end sil function 'simpleTestVar3b' +sil [ossa] @simpleTestVar3b : $@convention(thin) () -> () { +bb0: + %get_value_S2 = function_ref @get_value_S2 : $@convention(thin) () -> @owned S2 + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %see = function_ref @see : $@convention(thin) (@guaranteed S) -> () + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S2 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S2 + %instance_1 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_1 to [init] %stack : $*S2 + %instance_2 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + %access_1 = begin_access [modify] [static] %stack : $*S2 + store %instance_2 to [assign] %access_1 : $*S2 + end_access %access_1 : $*S2 + %access_2 = begin_access [deinit] [static] %stack : $*S2 + %s1_addr = struct_element_addr %access_2 : $*S2, #S2.s1 + %s1 = load [copy] %s1_addr : $*S + apply %end(%s1) : $@convention(thin) (@owned S) -> () + end_access %access_2 : $*S2 
+ %access_3 = begin_access [read] [static] %stack : $*S2 + %s2_addr = struct_element_addr %access_3 : $*S2, #S2.s2 + %s2 = load [copy] %s2_addr : $*S + apply %see(%s2) : $@convention(thin) (@guaranteed S) -> () + destroy_value %s2 : $S + end_access %access_3 : $*S2 + apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S2 + dealloc_stack %stack_addr : $*S2 + %retval = tuple () + return %retval : $() +} + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// }} Single block +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// {{ Multiple blocks +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// Ensure that liveness extends to destroy in live-at-entry block where no def +// precedes destroy. And ensure that the reinit stack stays live until +// function exit. +// CHECK-LABEL: sil [ossa] @simpleTestVar3a_simplified_vary : {{.*}} { +// CHECK: [[GET_VALUE_S2:%[^,]+]] = function_ref @get_value_S2 +// CHECK: [[SEE:%[^,]+]] = function_ref @see +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack $S2 +// CHECK: [[INSTANCE_1:%[^,]+]] = apply [[GET_VALUE_S2]]() +// CHECK: store [[INSTANCE_1]] to [init] [[STACK]] +// CHECK: [[S1_ADDR:%[^,]+]] = struct_element_addr [[STACK]] +// CHECK: [[S1:%[^,]+]] = load_borrow [[S1_ADDR]] +// CHECK: apply [[SEE]]([[S1]]) +// CHECK: end_borrow [[S1]] +// CHECK: [[S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] +// CHECK: [[S2:%[^,]+]] = load_borrow [[S2_ADDR]] +// CHECK: apply [[SEE]]([[S2]]) +// CHECK: end_borrow [[S2]] +// CHECK: apply [[OTHER]]() +// CHECK: [[INSTANCE_2:%[^,]+]] = apply [[GET_VALUE_S2]]() +// CHECK: br bb1 +// CHECK: bb1: +// CHECK: destroy_addr [[STACK]] +// CHECK: store [[INSTANCE_2]] to [init] [[STACK]] +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] +// 
CHECK-LABEL: } // end sil function 'simpleTestVar3a_simplified_vary' +sil [ossa] @simpleTestVar3a_simplified_vary : $@convention(thin) () -> () { +bb0: + %get_value_S2 = function_ref @get_value_S2 : $@convention(thin) () -> @owned S2 + %see = function_ref @see : $@convention(thin) (@guaranteed S) -> () + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S2 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S2 + %instance_1 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_1 to [init] %stack : $*S2 + %s1_addr = struct_element_addr %stack : $*S2, #S2.s1 + %s1 = load_borrow %s1_addr : $*S + apply %see(%s1) : $@convention(thin) (@guaranteed S) -> () + end_borrow %s1 : $S + %s2_addr = struct_element_addr %stack : $*S2, #S2.s2 + %s2 = load_borrow %s2_addr : $*S + apply %see(%s2) : $@convention(thin) (@guaranteed S) -> () + end_borrow %s2 : $S + apply %other() : $@convention(thin) () -> () + %instance_3 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + br bb1 +bb1: + store %instance_3 to [assign] %stack : $*S2 + apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S2 + dealloc_stack %stack_addr : $*S2 + %retval = tuple () + return %retval : $() +} + +// Stack is initialized twice, in entry and middle. In each case, it is passed +// to a consuming function on the left block and unused on the right block; and +// each right block contains an apply of other. +// +// Verify that the destroy in each right block is after the apply of other. 
+// CHECK-LABEL: sil [ossa] @simpleTestVar4 : {{.*}} { +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: cond_br {{%[^,]+}}, [[LEFT_1:bb[0-9]+]], [[RIGHT_1:bb[0-9]+]] +// CHECK: [[LEFT_1]]: +// CHECK: br [[MIDDLE:bb[0-9]+]] +// CHECK: [[RIGHT_1]]: +// CHECK: apply [[OTHER]]() +// CHECK: destroy_addr [[STACK]] +// CHECK: br [[MIDDLE]] +// CHECK: [[MIDDLE]]: +// CHECK: cond_br {{%[^,]+}}, [[LEFT_2:bb[0-9]+]], [[RIGHT_2:bb[0-9]+]] +// CHECK: [[LEFT_2]]: +// CHECK: br [[EXIT:bb[0-9]+]] +// CHECK: [[RIGHT_2]]: +// CHECK: destroy_addr [[STACK]] +// CHECK: apply [[OTHER]]() : $@convention(thin) () -> () +// CHECK: br [[EXIT]] +// CHECK-LABEL: } // end sil function 'simpleTestVar4' +sil [ossa] @simpleTestVar4 : $@convention(thin) () -> () { +entry: + %get_value_S2 = function_ref @get_value_S2 : $@convention(thin) () -> @owned S2 + %condition = function_ref @condition : $@convention(thin) () -> Builtin.Int1 + %end_S2 = function_ref @end_S2 : $@convention(thin) (@owned S2) -> () + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S2 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S2 + %instance_1 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_1 to [init] %stack : $*S2 + %instance_2 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + %access_1 = begin_access [modify] [static] %stack : $*S2 + store %instance_2 to [assign] %access_1 : $*S2 + end_access %access_1 : $*S2 + %which_1 = apply %condition() : $@convention(thin) () -> Builtin.Int1 + cond_br %which_1, left_1, right_1 + +left_1: + %access_2 = begin_access [deinit] [static] %stack : $*S2 + %reload_1 = load [copy] %access_2 : $*S2 + apply %end_S2(%reload_1) : $@convention(thin) (@owned S2) -> () + end_access %access_2 : $*S2 + br middle + +right_1: + apply %other() : $@convention(thin) () -> () + br middle + +middle: + %instance_3 = apply %get_value_S2() : 
$@convention(thin) () -> @owned S2 + %access_3 = begin_access [modify] [static] %stack : $*S2 + store %instance_3 to [assign] %access_3 : $*S2 + end_access %access_3 : $*S2 + %which_2 = apply %condition() : $@convention(thin) () -> Builtin.Int1 + cond_br %which_2, left_2, right_2 + +left_2: + %access_4 = begin_access [deinit] [static] %stack : $*S2 + %reload_2 = load [copy] %access_4 : $*S2 + apply %end_S2(%reload_2) : $@convention(thin) (@owned S2) -> () + end_access %access_4 : $*S2 + br end + +right_2: + apply %other() : $@convention(thin) () -> () + br end + +end: + %retval = tuple () + destroy_addr %stack : $*S2 + dealloc_stack %stack_addr : $*S2 + return %retval : $() +} + +// One field is consumed in either branch: s1 in left, s2 in right. The other +// field should live until the end of the block. +// CHECK-LABEL: sil [ossa] @simpleTestVar6 : $@convention(thin) () -> () { +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: cond_br {{%[^,]+}}, [[LEFT:bb[0-9]+]], [[RIGHT:bb[0-9]+]] +// CHECK: [[LEFT]]: +// CHECK: apply [[OTHER]]() +// CHECK: [[S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*S2, #S2.s2 +// CHECK: destroy_addr [[S2_ADDR]] : $*S +// CHECK: br [[EXIT:bb[0-9]+]] +// CHECK: [[RIGHT]]: +// CHECK: apply [[OTHER]]() +// CHECK: [[S1_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*S2, #S2.s1 +// CHECK: destroy_addr [[S1_ADDR]] : $*S +// CHECK: br [[EXIT]] +// CHECK: [[EXIT]]: +// CHECK-LABEL: } // end sil function 'simpleTestVar6' +sil [ossa] @simpleTestVar6 : $@convention(thin) () -> () { +entry: + %get_value_S2 = function_ref @get_value_S2 : $@convention(thin) () -> @owned S2 + %condition = function_ref @condition : $@convention(thin) () -> Builtin.Int1 + %other = function_ref @other : $@convention(thin) () -> () + %end = function_ref @end : $@convention(thin) (@owned S) -> () + + %stack_addr = alloc_stack $S2 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S2 + 
%instance_1 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + store %instance_1 to [init] %stack : $*S2 + %instance_2 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + %access_1 = begin_access [modify] [static] %stack : $*S2 + store %instance_2 to [assign] %access_1 : $*S2 + end_access %access_1 : $*S2 + %which = apply %condition() : $@convention(thin) () -> Builtin.Int1 + cond_br %which, left, right + +left: + %access_2 = begin_access [deinit] [static] %stack : $*S2 + %s1_addr = struct_element_addr %access_2 : $*S2, #S2.s1 + %s1 = load [copy] %s1_addr : $*S + apply %end(%s1) : $@convention(thin) (@owned S) -> () + end_access %access_2 : $*S2 + apply %other() : $@convention(thin) () -> () + br exit + +right: + %access_3 = begin_access [deinit] [static] %stack : $*S2 + %s2_addr = struct_element_addr %access_3 : $*S2, #S2.s2 + %s2 = load [copy] %s2_addr : $*S + apply %end(%s2) : $@convention(thin) (@owned S) -> () + end_access %access_3 : $*S2 + apply %other() : $@convention(thin) () -> () + br exit + +exit: + %instance_3 = apply %get_value_S2() : $@convention(thin) () -> @owned S2 + %access_4 = begin_access [modify] [static] %stack : $*S2 + store %instance_3 to [assign] %access_4 : $*S2 + end_access %access_4 : $*S2 + destroy_addr %stack : $*S2 + dealloc_stack %stack_addr : $*S2 + %retval = tuple () + return %retval : $() +} + +// The struct has three fields: +// s1 is consumed in left but not right +// s2 is consumed in right but not left +// s3 is not consumed +// There is an apply of other after the consumes and at the beginning of exit. +// +// Ensure that the not-consumed-on-this-branch field (s2 in left, s1 in right) +// is destroyed after the apply of other in each branch block. Ensure that the +// unconsumed field is destroyed at function exit--after the apply of other in +// exit. 
+// CHECK-LABEL: sil [ossa] @simpleTestVar6a : $@convention(thin) () -> () { +// CHECK: [[OTHER:%[^,]+]] = function_ref @other : $@convention(thin) () -> () +// CHECK: [[STACK:%[^,]+]] = alloc_stack $S3 +// CHECK: cond_br {{%[^,]+}}, [[LEFT:bb[0-9]+]], [[RIGHT:bb[0-9]+]] +// CHECK: [[LEFT]]: +// CHECK: apply [[OTHER]]() : $@convention(thin) () -> () +// CHECK: [[S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*S3, #S3.s2 +// CHECK: destroy_addr [[S2_ADDR]] : $*S +// CHECK: br [[EXIT:bb[0-9]+]] +// CHECK: [[RIGHT]]: +// CHECK: apply [[OTHER]]() : $@convention(thin) () -> () +// CHECK: [[S1_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*S3, #S3.s1 +// CHECK: destroy_addr [[S1_ADDR]] : $*S +// CHECK: br [[EXIT]] +// CHECK: [[EXIT]]: +// CHECK: apply [[OTHER]]() : $@convention(thin) () -> () +// CHECK: [[S3_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*S3, #S3.s3 +// CHECK: destroy_addr [[S3_ADDR]] : $*S +// CHECK-LABEL: } // end sil function 'simpleTestVar6a' +sil [ossa] @simpleTestVar6a : $@convention(thin) () -> () { +bb0: + %condition = function_ref @condition : $@convention(thin) () -> Builtin.Int1 + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %get_value_S3 = function_ref @get_value_S3 : $@convention(thin) () -> @owned S3 + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S3 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S3 + %instance_1 = apply %get_value_S3() : $@convention(thin) () -> @owned S3 + store %instance_1 to [init] %stack : $*S3 + %instance_2 = apply %get_value_S3() : $@convention(thin) () -> @owned S3 + %access_1 = begin_access [modify] [static] %stack : $*S3 + store %instance_2 to [assign] %access_1 : $*S3 + end_access %access_1 : $*S3 + %which = apply %condition() : $@convention(thin) () -> Builtin.Int1 + cond_br %which, left, right + +left: + %access_2 = begin_access [deinit] [static] %stack : $*S3 + %s1_addr = struct_element_addr %access_2 : $*S3, 
#S3.s1 + %s1 = load [copy] %s1_addr : $*S + apply %end(%s1) : $@convention(thin) (@owned S) -> () + end_access %access_2 : $*S3 + apply %other() : $@convention(thin) () -> () + br exit + +right: + %access_3 = begin_access [deinit] [static] %stack : $*S3 + %s2_addr = struct_element_addr %access_3 : $*S3, #S3.s2 + %s2 = load [copy] %s2_addr : $*S + apply %end(%s2) : $@convention(thin) (@owned S) -> () + end_access %access_3 : $*S3 + apply %other() : $@convention(thin) () -> () + br exit + +exit: + apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S3 + dealloc_stack %stack_addr : $*S3 + %retval = tuple () + return %retval : $() +} + +/// The fields #S3.s1 and #S3.s3 are consumed in left but not right. +/// So they should be live until the end of right. +/// The field #S3.s2 is consumed in right but not left. +/// So it should be live until the end of left. +/// +// CHECK-LABEL: sil [ossa] @simpleTestVar6b : {{.*}} { +// CHECK: {{bb[0-9]+}}: +// CHECK: [[OTHER:%[^,]+]] = function_ref @other +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: cond_br {{%[^,]+}}, [[LEFT:bb[0-9]+]], [[RIGHT:bb[0-9]+]] +// CHECK: [[LEFT]]: +// CHECK: apply [[OTHER]]() +// CHECK: [[S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*S3, #S3.s2 +// CHECK: destroy_addr [[S2_ADDR]] +// CHECK: br [[EXIT:bb[0-9]+]] +// CHECK: [[RIGHT]]: +// CHECK: apply [[OTHER]]() +// CHECK: [[S3_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*S3, #S3.s3 +// CHECK: destroy_addr [[S3_ADDR]] +// CHECK: [[S1_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*S3, #S3.s1 +// CHECK: destroy_addr [[S1_ADDR]] +// CHECK: br [[EXIT]] +// CHECK: [[EXIT]]: +// CHECK-LABEL: } // end sil function 'simpleTestVar6b' +sil [ossa] @simpleTestVar6b : $@convention(thin) () -> () { +entry: + %condition = function_ref @condition : $@convention(thin) () -> Builtin.Int1 + %end = function_ref @end : $@convention(thin) (@owned S) -> () + %get_value_S3 = function_ref @get_value_S3 : $@convention(thin) () -> @owned 
S3 + %other = function_ref @other : $@convention(thin) () -> () + + %stack_addr = alloc_stack $S3 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*S3 + %instance_1 = apply %get_value_S3() : $@convention(thin) () -> @owned S3 + store %instance_1 to [init] %stack : $*S3 + %instance_2 = apply %get_value_S3() : $@convention(thin) () -> @owned S3 + %access_1 = begin_access [modify] [static] %stack : $*S3 + store %instance_2 to [assign] %access_1 : $*S3 + end_access %access_1 : $*S3 + %which = apply %condition() : $@convention(thin) () -> Builtin.Int1 + cond_br %which, left, right + +left: + %access_2 = begin_access [deinit] [static] %stack : $*S3 + %s1_addr = struct_element_addr %access_2 : $*S3, #S3.s1 + %s1 = load [copy] %s1_addr : $*S + apply %end(%s1) : $@convention(thin) (@owned S) -> () + end_access %access_2 : $*S3 + %access_3 = begin_access [deinit] [static] %stack : $*S3 + %s3_addr = struct_element_addr %access_3 : $*S3, #S3.s3 + %s3 = load [copy] %s3_addr : $*S + apply %end(%s3) : $@convention(thin) (@owned S) -> () + end_access %access_3 : $*S3 + apply %other() : $@convention(thin) () -> () + br exit + +right: + %access_4 = begin_access [deinit] [static] %stack : $*S3 + %s2_addr = struct_element_addr %access_4 : $*S3, #S3.s2 + %s2 = load [copy] %s2_addr : $*S + apply %end(%s2) : $@convention(thin) (@owned S) -> () + end_access %access_4 : $*S3 + apply %other() : $@convention(thin) () -> () + br exit + +exit: + apply %other() : $@convention(thin) () -> () + destroy_addr %stack : $*S3 + dealloc_stack %stack_addr : $*S3 + %retval = tuple () + return %retval : $() +} + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// }} Multiple blocks +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// ============================================================================= +// ============================================================================= +// }} Multiple defs +// 
============================================================================= +// ============================================================================= + +// ############################################################################# +// ############################################################################# +// }} AGGREGATE STORAGE +// ############################################################################# +// ############################################################################# diff --git a/test/SILOptimizer/moveonly_lifetime.swift b/test/SILOptimizer/moveonly_lifetime.swift index 46e776ed06003..c553305067ca5 100644 --- a/test/SILOptimizer/moveonly_lifetime.swift +++ b/test/SILOptimizer/moveonly_lifetime.swift @@ -1,4 +1,4 @@ -// RUN: %target-swift-emit-sil -sil-verify-all -module-name moveonly_lifetime -o /dev/null -Xllvm -sil-print-canonical-module -Onone -verify -enable-experimental-feature MoveOnlyClasses %s 2>&1 | %FileCheck %s +// RUN: %target-swift-emit-sil -sil-verify-all -module-name moveonly_lifetime -o /dev/null -Xllvm -sil-print-canonical-module -Onone -verify -enable-experimental-feature MoveOnlyClasses %s | %FileCheck %s struct C : ~Copyable { deinit {} @@ -31,17 +31,16 @@ func something() // CHECK: [[INSTANCE:%.*]] = load [take] [[STACK]] // CHECK: [[TAKE_C:%[^,]+]] = function_ref @takeC // CHECK: apply [[TAKE_C]]([[INSTANCE]]) +// CHECK: br [[BOTTOM:bb[0-9]+]] // // CHECK: [[LEFT]]: // CHECK: [[INSTANCE:%.*]] = load_borrow [[STACK]] // CHECK: [[BORROW_C:%[^,]+]] = function_ref @borrowC // CHECK: apply [[BORROW_C]]([[INSTANCE]]) -// -// TODO: Once we maximize lifetimes this should be below something. 
-// CHECK: destroy_addr [[STACK]] -// // CHECK: [[SOMETHING:%[^,]+]] = function_ref @something // CHECK: apply [[SOMETHING]] +// CHECK: destroy_addr [[STACK]] +// CHECK: br [[BOTTOM]] // CHECK-LABEL: } // end sil function 'test_diamond__consume_r__use_l' @_silgen_name("test_diamond__consume_r__use_l") func test_diamond(_ condition: Bool) { From 4e6cbd133583f3e665baaea0e6231665028c9615 Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Fri, 16 Jun 2023 18:36:56 -0700 Subject: [PATCH 7/9] [move-only] Avoid loc from func decl. It's always the first line of the function, so try to do better. --- .../Mandatory/MoveOnlyDiagnostics.cpp | 7 +++++++ test/SILOptimizer/discard_checking.swift | 16 ++++++++-------- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyDiagnostics.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyDiagnostics.cpp index 4375ee9631556..db5625c07130a 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyDiagnostics.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyDiagnostics.cpp @@ -14,6 +14,7 @@ #include "MoveOnlyDiagnostics.h" +#include "swift/AST/Decl.h" #include "swift/AST/DiagnosticsSIL.h" #include "swift/AST/Stmt.h" #include "swift/Basic/Defer.h" @@ -226,6 +227,12 @@ void DiagnosticEmitter::emitMissingConsumeInDiscardingContext( return true; case SILLocation::RegularKind: { + Decl *decl = loc.getAsASTNode(); + if (decl && isa(decl)) { + // Having the function itself as a location results in a location at the + // first line of the function. Find another location. + return false; + } Stmt *stmt = loc.getAsASTNode(); if (!stmt) return true; // For non-statements, assume it is exiting the func. 
diff --git a/test/SILOptimizer/discard_checking.swift b/test/SILOptimizer/discard_checking.swift index fbbd5d180a5af..69334b6283c36 100644 --- a/test/SILOptimizer/discard_checking.swift +++ b/test/SILOptimizer/discard_checking.swift @@ -165,7 +165,7 @@ struct Basics: ~Copyable { } } - consuming func test8_stillMissingAConsume1(_ c: Color) throws { // expected-error {{must consume 'self' before exiting method that discards self}} + consuming func test8_stillMissingAConsume1(_ c: Color) throws { if case .red = c { discard self // expected-note {{discarded self here}} return @@ -174,7 +174,7 @@ struct Basics: ~Copyable { _ = consume self fatalError("hi") } - } + } // expected-error {{must consume 'self' before exiting method that discards self}} consuming func test8_stillMissingAConsume2(_ c: Color) throws { if case .red = c { @@ -251,7 +251,7 @@ struct Basics: ~Copyable { } } - consuming func test11(_ c: Color) { // expected-error {{must consume 'self' before exiting method that discards self}} + consuming func test11(_ c: Color) { guard case .red = c else { discard self // expected-note {{discarded self here}} return @@ -264,7 +264,7 @@ struct Basics: ~Copyable { let x = self self = x mutator() - } + } // expected-error {{must consume 'self' before exiting method that discards self}} consuming func test11_fixed(_ c: Color) { guard case .red = c else { @@ -328,13 +328,13 @@ struct Basics: ~Copyable { _ = consume self } - consuming func test13(_ c: Color) async { // expected-error {{must consume 'self' before exiting method that discards self}} + consuming func test13(_ c: Color) async { guard case .red = c else { discard self // expected-note {{discarded self here}} return } await asyncer() - } + } // expected-error {{must consume 'self' before exiting method that discards self}} consuming func test13_fixed(_ c: Color) async { guard case .red = c else { @@ -345,7 +345,7 @@ struct Basics: ~Copyable { _ = consume self } - consuming func test14(_ c: Color) async { // 
expected-error {{must consume 'self' before exiting method that discards self}} + consuming func test14(_ c: Color) async { guard case .red = c else { discard self // expected-note {{discarded self here}} return @@ -354,7 +354,7 @@ struct Basics: ~Copyable { cont.resume() } print("back!") - } + } // expected-error {{must consume 'self' before exiting method that discards self}} consuming func test14_fixed(_ c: Color) async { guard case .red = c else { From c58d560b8d0c1aee69e88a8bfa77f2dc17a90f39 Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Fri, 16 Jun 2023 18:42:43 -0700 Subject: [PATCH 8/9] [MoveOnlyAddressChecker] Added extension flag. Passing ``` -Xllvm -move-only-address-checker-disable-lifetime-extension=true ``` will skip the maximization of unconsumed field lifetimes. --- .../Mandatory/MoveOnlyAddressCheckerUtils.cpp | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp index 8675e82417bee..1e52e99d8ae69 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp @@ -280,6 +280,12 @@ using namespace swift; using namespace swift::siloptimizer; +llvm::cl::opt DisableMoveOnlyAddressCheckerLifetimeExtension( + "move-only-address-checker-disable-lifetime-extension", + llvm::cl::init(false), + llvm::cl::desc("Disable the lifetime extension of non-consumed fields of " + "move-only values.")); + //===----------------------------------------------------------------------===// // MARK: Memory Utilities //===----------------------------------------------------------------------===// @@ -3181,8 +3187,10 @@ bool MoveOnlyAddressCheckerPImpl::performSingleCheck( FieldSensitivePrunedLivenessBoundary boundary(liveness.getNumSubElements()); liveness.computeBoundary(boundary); - ExtendUnconsumedLiveness extension(addressUseState, liveness, boundary); 
- extension.run(); + if (!DisableMoveOnlyAddressCheckerLifetimeExtension) { + ExtendUnconsumedLiveness extension(addressUseState, liveness, boundary); + extension.run(); + } boundary.clear(); liveness.computeBoundary(boundary); insertDestroysOnBoundary(markedAddress, liveness, boundary); From bff2838fc8448a60616a88d06792e3d3408ed3a3 Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Fri, 16 Jun 2023 18:11:03 -0700 Subject: [PATCH 9/9] [MoveOnlyAddressChecker] Fix used fields repr. The address checker records uses in its livenessUses map. Previously, that map mapped from an instruction to a range of fields of the type. But an instruction can use multiple discontiguous fields of a single value. Here, such instructions are properly recorded by fixing the map to store a bit vector for each instruction. rdar://110676577 --- .../swift/SIL/FieldSensitivePrunedLiveness.h | 84 ++++++- .../Utils/FieldSensitivePrunedLiveness.cpp | 46 ++++ .../Mandatory/MoveOnlyAddressCheckerUtils.cpp | 233 ++++++++++-------- test/SILOptimizer/moveonly_addresschecker.sil | 60 ----- .../moveonly_addresschecker_unmaximized.sil | 67 +++++ 5 files changed, 319 insertions(+), 171 deletions(-) create mode 100644 test/SILOptimizer/moveonly_addresschecker_unmaximized.sil diff --git a/include/swift/SIL/FieldSensitivePrunedLiveness.h b/include/swift/SIL/FieldSensitivePrunedLiveness.h index 456ba6c387bee..b38ae79a1d30c 100644 --- a/include/swift/SIL/FieldSensitivePrunedLiveness.h +++ b/include/swift/SIL/FieldSensitivePrunedLiveness.h @@ -345,6 +345,16 @@ struct TypeTreeLeafTypeRange { return TypeTreeLeafTypeRange(start, end); } + /// Whether \p bits contains any of the in-range bits. + bool intersects(SmallBitVector const &bits) const { + for (auto element : getRange()) { + if (bits.test(element)) { + return true; + } + } + return false; + } + /// Is the given leaf type specified by \p singleLeafElementNumber apart of /// our \p range of leaf type values in the our larger type. 
bool contains(SubElementOffset singleLeafElementNumber) const { @@ -359,7 +369,7 @@ struct TypeTreeLeafTypeRange { } /// Sets each bit in \p bits corresponding to an element of this range. - void setBits(SmallBitVector &bits) { + void setBits(SmallBitVector &bits) const { for (auto element : getRange()) { bits.set(element); } @@ -696,6 +706,14 @@ class FieldSensitivePrunedLiveness { } } + /// Record that the instruction uses the bits in \p bits. + void addUses(SmallBitVector const &bits, bool lifetimeEnding) { + liveBits |= bits; + if (lifetimeEnding) { + consumingBits |= bits; + } + } + /// Populates the provided vector with contiguous ranges of bits which are /// users of the same sort. void getContiguousRanges( @@ -838,6 +856,9 @@ class FieldSensitivePrunedLiveness { void updateForUse(SILInstruction *user, TypeTreeLeafTypeRange span, bool lifetimeEnding); + void updateForUse(SILInstruction *user, SmallBitVector const &bits, + bool lifetimeEnding); + void getBlockLiveness(SILBasicBlock *bb, TypeTreeLeafTypeRange span, SmallVectorImpl &resultingFoundLiveness) const { @@ -862,6 +883,14 @@ class FieldSensitivePrunedLiveness { SmallBitVector &liveOutBits, SmallBitVector &deadBits) const; + InterestingUser &getOrCreateInterestingUser(SILInstruction *user) { + auto iter = users.find(user); + if (iter == users.end()) { + iter = users.insert({user, InterestingUser(getNumSubElements())}).first; + } + return *&iter->second; + } + /// If \p user has had uses recored, return a pointer to the InterestingUser /// where they've been recorded. 
InterestingUser const *getInterestingUser(SILInstruction *user) const { @@ -885,11 +914,12 @@ class FieldSensitivePrunedLiveness { bool isInterestingUserOfKind(SILInstruction *user, IsInterestingUser kind, TypeTreeLeafTypeRange range) const { auto *record = getInterestingUser(user); - if (!record) + if (!record) { return kind == IsInterestingUser::NonUser; + } for (auto element : range.getRange()) { - if (isInterestingUser(user, element) != kind) + if (record->isInterestingUser(element) != kind) return false; } return true; @@ -918,11 +948,12 @@ class FieldSensitivePrunedLiveness { /// argument must be copied. void addInterestingUser(SILInstruction *user, TypeTreeLeafTypeRange range, bool lifetimeEnding) { - auto iter = users.find(user); - if (iter == users.end()) { - iter = users.insert({user, InterestingUser(getNumSubElements())}).first; - } - iter->second.addUses(range, lifetimeEnding); + getOrCreateInterestingUser(user).addUses(range, lifetimeEnding); + } + + void addInterestingUser(SILInstruction *user, SmallBitVector const &bits, + bool lifetimeEnding) { + getOrCreateInterestingUser(user).addUses(bits, lifetimeEnding); } }; @@ -1036,6 +1067,11 @@ class FieldSensitivePrunedLiveRange : public FieldSensitivePrunedLiveness { void updateForUse(SILInstruction *user, TypeTreeLeafTypeRange span, bool lifetimeEnding); + /// Customize updateForUse for FieldSensitivePrunedLiveness such that we check + /// that we consider defs as stopping liveness from being propagated up. + void updateForUse(SILInstruction *user, SmallBitVector const &bits, + bool lifetimeEnding); + /// Compute the boundary from the blocks discovered during liveness analysis. 
/// /// Precondition: \p liveness.getDiscoveredBlocks() is a valid list of all @@ -1107,6 +1143,14 @@ class FieldSensitiveSSAPrunedLiveRange return inst == defInst.first && defInst.second->contains(bit); } + bool isDef(SILInstruction *inst, SmallBitVector const &bits) const { + if (inst != defInst.first) + return false; + SmallBitVector defBits(bits.size()); + defInst.second->setBits(defBits); + return (defBits & bits) == bits; + } + bool isDef(SILInstruction *inst, TypeTreeLeafTypeRange span) const { return inst == defInst.first && defInst.second->setIntersection(span).has_value(); @@ -1217,6 +1261,30 @@ class FieldSensitiveMultiDefPrunedLiveRange *iter, [&](TypeTreeLeafTypeRange span) { return span.contains(bit); }); } + bool isDef(SILValue value, SmallBitVector const &bits) const { + assert(isInitialized()); + auto iter = defs.find(cast(value)); + if (!iter) + return false; + SmallBitVector allBits(bits.size()); + for (auto range : *iter) { + range.setBits(allBits); + } + return (bits & allBits) == bits; + } + + bool isDef(SILInstruction *inst, SmallBitVector const &bits) const { + assert(isInitialized()); + auto iter = defs.find(cast(inst)); + if (!iter) + return false; + SmallBitVector allBits(bits.size()); + for (auto range : *iter) { + range.setBits(allBits); + } + return (bits & allBits) == bits; + } + bool isDef(SILInstruction *inst, TypeTreeLeafTypeRange span) const { assert(isInitialized()); auto iter = defs.find(cast(inst)); diff --git a/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp b/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp index e80dbac5946d0..e75dcb22867fa 100644 --- a/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp +++ b/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp @@ -609,6 +609,16 @@ void FieldSensitivePrunedLiveness::updateForUse(SILInstruction *user, addInterestingUser(user, range, lifetimeEnding); } +void FieldSensitivePrunedLiveness::updateForUse(SILInstruction *user, + SmallBitVector const &bits, + bool lifetimeEnding) { + for (auto 
bit : bits.set_bits()) { + liveBlocks.updateForUse(user, bit); + } + + addInterestingUser(user, bits, lifetimeEnding); +} + //===----------------------------------------------------------------------===// // MARK: FieldSensitivePrunedLiveRange //===----------------------------------------------------------------------===// @@ -822,6 +832,42 @@ void FieldSensitivePrunedLiveRange::updateForUse( FieldSensitivePrunedLiveness::updateForUse(user, range, lifetimeEnding); } +template +void FieldSensitivePrunedLiveRange::updateForUse( + SILInstruction *user, SmallBitVector const &bits, bool lifetimeEnding) { + PRUNED_LIVENESS_LOG( + llvm::dbgs() + << "Begin FieldSensitivePrunedLiveRange::updateForUse " + "for: " + << *user); + PRUNED_LIVENESS_LOG(llvm::dbgs() + << "Looking for def instruction earlier in the block!\n"); + + auto *parentBlock = user->getParent(); + for (auto ii = std::next(user->getReverseIterator()), + ie = parentBlock->rend(); + ii != ie; ++ii) { + // If we find the def, just mark this instruction as being an interesting + // instruction. + if (asImpl().isDef(&*ii, bits)) { + PRUNED_LIVENESS_LOG(llvm::dbgs() << " Found def: " << *ii); + PRUNED_LIVENESS_LOG( + llvm::dbgs() + << " Marking inst as interesting user and returning!\n"); + addInterestingUser(user, bits, lifetimeEnding); + return; + } + } + + // Otherwise, just delegate to our parent class's update for use. This will + // update liveness for our predecessor blocks and add this instruction as an + // interesting user. + PRUNED_LIVENESS_LOG(llvm::dbgs() + << "No defs found! 
Delegating to " + "FieldSensitivePrunedLiveness::updateForUse.\n"); + FieldSensitivePrunedLiveness::updateForUse(user, bits, lifetimeEnding); +} + //===----------------------------------------------------------------------===// // MARK: Boundary Computation Utilities //===----------------------------------------------------------------------===// diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp index 1e52e99d8ae69..a7d7ed6bf798e 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp @@ -532,92 +532,6 @@ static bool isCopyableValue(SILValue value) { return true; } -//===----------------------------------------------------------------------===// -// MARK: Partial Apply Utilities -//===----------------------------------------------------------------------===// - -static bool findNonEscapingPartialApplyUses( - PartialApplyInst *pai, TypeTreeLeafTypeRange leafRange, - llvm::SmallMapVector - &livenessUses) { - StackList worklist(pai->getFunction()); - for (auto *use : pai->getUses()) - worklist.push_back(use); - - LLVM_DEBUG(llvm::dbgs() << "Searching for partial apply uses!\n"); - while (!worklist.empty()) { - auto *use = worklist.pop_back_val(); - - if (use->isTypeDependent()) - continue; - - auto *user = use->getUser(); - - // These instructions do not cause us to escape. - if (isIncidentalUse(user) || isa(user)) - continue; - - // Look through these instructions. - if (isa(user) || isa(user) || - isa(user) || - // If we capture this partial_apply in another partial_apply, then we - // know that said partial_apply must not have escaped the value since - // otherwise we could not have an inout_aliasable argument or be - // on_stack. Process it recursively so that we treat uses of that - // partial_apply and applies of that partial_apply as uses of our - // partial_apply. 
- // - // We have this separately from the other look through sections so that - // we can make it clearer what we are doing here. - isa(user)) { - for (auto *use : cast(user)->getUses()) - worklist.push_back(use); - continue; - } - - // If we have a mark_dependence and are the value, look through the - // mark_dependence. - if (auto *mdi = dyn_cast(user)) { - if (mdi->getValue() == use->get()) { - for (auto *use : mdi->getUses()) - worklist.push_back(use); - continue; - } - } - - if (auto apply = FullApplySite::isa(user)) { - // If we apply the function or pass the function off to an apply, then we - // need to treat the function application as a liveness use of the - // variable since if the partial_apply is invoked within the function - // application, we may access the captured variable. - livenessUses.insert({user, leafRange}); - if (apply.beginsCoroutineEvaluation()) { - // If we have a coroutine, we need to treat the abort_apply and - // end_apply as liveness uses since once we execute one of those - // instructions, we have returned control to the coroutine which means - // that we could then access the captured variable again. - auto *bai = cast(user); - SmallVector endApplies; - SmallVector abortApplies; - bai->getCoroutineEndPoints(endApplies, abortApplies); - for (auto *eai : endApplies) - livenessUses.insert({eai, leafRange}); - for (auto *aai : abortApplies) - livenessUses.insert({aai, leafRange}); - } - continue; - } - - LLVM_DEBUG( - llvm::dbgs() - << "Found instruction we did not understand... returning false!\n"); - LLVM_DEBUG(llvm::dbgs() << "Instruction: " << *user); - return false; - } - - return true; -} - //===----------------------------------------------------------------------===// // MARK: Find Candidate Mark Must Checks //===----------------------------------------------------------------------===// @@ -648,8 +562,7 @@ namespace { struct UseState { MarkMustCheckInst *address; - /// The number of fields in the exploded type. 
Set in initializeLiveness. - unsigned fieldCount = UINT_MAX; + Optional cachedNumSubelements; /// The blocks that consume fields of the value. /// @@ -662,7 +575,7 @@ struct UseState { /// A map from a liveness requiring use to the part of the type that it /// requires liveness for. - llvm::SmallMapVector livenessUses; + llvm::SmallMapVector livenessUses; /// A map from a load [copy] or load [take] that we determined must be /// converted to a load_borrow to the part of the type tree that it needs to @@ -731,6 +644,34 @@ struct UseState { SILFunction *getFunction() const { return address->getFunction(); } + /// The number of fields in the exploded type. + unsigned getNumSubelements() { + if (!cachedNumSubelements) { + cachedNumSubelements = TypeSubElementCount(address); + } + return *cachedNumSubelements; + } + + SmallBitVector &getOrCreateLivenessUse(SILInstruction *inst) { + auto iter = livenessUses.find(inst); + if (iter == livenessUses.end()) { + iter = livenessUses.insert({inst, SmallBitVector(getNumSubelements())}) + .first; + } + return iter->second; + } + + void recordLivenessUse(SILInstruction *inst, SmallBitVector const &bits) { + getOrCreateLivenessUse(inst) |= bits; + } + + void recordLivenessUse(SILInstruction *inst, TypeTreeLeafTypeRange range) { + auto &bits = getOrCreateLivenessUse(inst); + for (auto element : range.getRange()) { + bits.set(element); + } + } + /// Returns true if this is a terminator instruction that although it doesn't /// use our inout argument directly is used by the pass to ensure that we /// reinit said argument if we consumed it in the body of the function. 
@@ -765,6 +706,7 @@ struct UseState { void clear() { address = nullptr; + cachedNumSubelements = llvm::None; consumingBlocks.clear(); destroys.clear(); livenessUses.clear(); @@ -825,7 +767,9 @@ struct UseState { void recordConsumingBlock(SILBasicBlock *block, TypeTreeLeafTypeRange range) { auto iter = consumingBlocks.find(block); if (iter == consumingBlocks.end()) { - iter = consumingBlocks.insert({block, SmallBitVector(fieldCount)}).first; + iter = + consumingBlocks.insert({block, SmallBitVector(getNumSubelements())}) + .first; } range.setBits(iter->second); } @@ -879,7 +823,7 @@ struct UseState { { auto iter = livenessUses.find(inst); if (iter != livenessUses.end()) { - if (span.setIntersection(iter->second)) + if (span.intersects(iter->second)) return true; } } @@ -929,10 +873,94 @@ struct UseState { } // namespace +//===----------------------------------------------------------------------===// +// MARK: Partial Apply Utilities +//===----------------------------------------------------------------------===// + +static bool findNonEscapingPartialApplyUses(PartialApplyInst *pai, + TypeTreeLeafTypeRange leafRange, + UseState &useState) { + StackList worklist(pai->getFunction()); + for (auto *use : pai->getUses()) + worklist.push_back(use); + + LLVM_DEBUG(llvm::dbgs() << "Searching for partial apply uses!\n"); + while (!worklist.empty()) { + auto *use = worklist.pop_back_val(); + + if (use->isTypeDependent()) + continue; + + auto *user = use->getUser(); + + // These instructions do not cause us to escape. + if (isIncidentalUse(user) || isa(user)) + continue; + + // Look through these instructions. + if (isa(user) || isa(user) || + isa(user) || + // If we capture this partial_apply in another partial_apply, then we + // know that said partial_apply must not have escaped the value since + // otherwise we could not have an inout_aliasable argument or be + // on_stack. 
Process it recursively so that we treat uses of that + // partial_apply and applies of that partial_apply as uses of our + // partial_apply. + // + // We have this separately from the other look through sections so that + // we can make it clearer what we are doing here. + isa(user)) { + for (auto *use : cast(user)->getUses()) + worklist.push_back(use); + continue; + } + + // If we have a mark_dependence and are the value, look through the + // mark_dependence. + if (auto *mdi = dyn_cast(user)) { + if (mdi->getValue() == use->get()) { + for (auto *use : mdi->getUses()) + worklist.push_back(use); + continue; + } + } + + if (auto apply = FullApplySite::isa(user)) { + // If we apply the function or pass the function off to an apply, then we + // need to treat the function application as a liveness use of the + // variable since if the partial_apply is invoked within the function + // application, we may access the captured variable. + useState.recordLivenessUse(user, leafRange); + if (apply.beginsCoroutineEvaluation()) { + // If we have a coroutine, we need to treat the abort_apply and + // end_apply as liveness uses since once we execute one of those + // instructions, we have returned control to the coroutine which means + // that we could then access the captured variable again. + auto *bai = cast(user); + SmallVector endApplies; + SmallVector abortApplies; + bai->getCoroutineEndPoints(endApplies, abortApplies); + for (auto *eai : endApplies) + useState.recordLivenessUse(eai, leafRange); + for (auto *aai : abortApplies) + useState.recordLivenessUse(aai, leafRange); + } + continue; + } + + LLVM_DEBUG( + llvm::dbgs() + << "Found instruction we did not understand... 
returning false!\n"); + LLVM_DEBUG(llvm::dbgs() << "Instruction: " << *user); + return false; + } + + return true; +} + void UseState::initializeLiveness( FieldSensitiveMultiDefPrunedLiveRange &liveness) { - fieldCount = liveness.getNumSubElements(); - + assert(liveness.getNumSubElements() == getNumSubelements()); // We begin by initializing all of our init uses. for (auto initInstAndValue : initInsts) { LLVM_DEBUG(llvm::dbgs() << "Found def: " << *initInstAndValue.first); @@ -1764,7 +1792,7 @@ bool GatherUsesVisitor::visitUse(Operand *op) { LLVM_DEBUG(llvm::dbgs() << "Found copy of copyable type. Treating as liveness use! " << *user); - useState.livenessUses.insert({user, *leafRange}); + useState.recordLivenessUse(user, *leafRange); return true; } @@ -1816,7 +1844,7 @@ bool GatherUsesVisitor::visitUse(Operand *op) { auto leafRange = TypeTreeLeafTypeRange::get(op->get(), getRootAddress()); if (!leafRange) return false; - useState.livenessUses.insert({user, *leafRange}); + useState.recordLivenessUse(user, *leafRange); return true; } @@ -1924,7 +1952,7 @@ bool GatherUsesVisitor::visitUse(Operand *op) { "since they will become end_borrows.\n"); for (auto *consumeUse : li->getConsumingUses()) { auto *dvi = cast(consumeUse->getUser()); - useState.livenessUses.insert({dvi, *leafRange}); + useState.recordLivenessUse(dvi, *leafRange); } return true; @@ -1963,7 +1991,7 @@ bool GatherUsesVisitor::visitUse(Operand *op) { "since they will become end_borrows.\n"); for (auto *consumeUse : li->getConsumingUses()) { auto *dvi = cast(consumeUse->getUser()); - useState.livenessUses.insert({dvi, *leafRange}); + useState.recordLivenessUse(dvi, *leafRange); } } else { // If we had a load [copy], store this into the copy list. 
These are the @@ -2030,7 +2058,7 @@ bool GatherUsesVisitor::visitUse(Operand *op) { if (!leafRange) return false; - useState.livenessUses.insert({user, *leafRange}); + useState.recordLivenessUse(user, *leafRange); return true; } @@ -2055,7 +2083,7 @@ bool GatherUsesVisitor::visitUse(Operand *op) { if (!leafRange) return false; - useState.livenessUses.insert({user, *leafRange}); + useState.recordLivenessUse(user, *leafRange); return true; } } @@ -2089,8 +2117,7 @@ bool GatherUsesVisitor::visitUse(Operand *op) { // where the partial apply is passed to a function. We treat those as // liveness uses. If we find a use we don't understand, we return false // here. - if (!findNonEscapingPartialApplyUses(pas, *leafRange, - useState.livenessUses)) { + if (!findNonEscapingPartialApplyUses(pas, *leafRange, useState)) { LLVM_DEBUG( llvm::dbgs() << "Failed to understand use of a non-escaping partial apply?!\n"); @@ -2113,7 +2140,7 @@ bool GatherUsesVisitor::visitUse(Operand *op) { return false; } - useState.livenessUses.insert({user, *leafRange}); + useState.recordLivenessUse(user, *leafRange); return true; } @@ -2132,7 +2159,7 @@ bool GatherUsesVisitor::visitUse(Operand *op) { llvm_unreachable("standard failure"); } #endif - useState.livenessUses.insert({user, *leafRange}); + useState.recordLivenessUse(user, *leafRange); return true; } diff --git a/test/SILOptimizer/moveonly_addresschecker.sil b/test/SILOptimizer/moveonly_addresschecker.sil index e03a8a420790f..10f730140af23 100644 --- a/test/SILOptimizer/moveonly_addresschecker.sil +++ b/test/SILOptimizer/moveonly_addresschecker.sil @@ -632,63 +632,3 @@ bb0: %22 = tuple () return %22 : $() } - -@_moveOnly -struct M4 { - let s1: M - let s2: M - let s3: M - let s4: M -} - -sil @get_M4 : $@convention(thin) () -> @owned M4 -sil @end_2 : $@convention(thin) (@owned M, @owned M) -> () -sil @see_addr_2 : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> () - - -/// Two non-contiguous fields (#M4.s2, #M4.s4) are borrowed by 
@see_addr_2. -/// Two non-contiguous fields (#M4.s1, #M$.s3) are consumed by @end_2. -/// -/// Verify that #M4.s2 and #M4.s4 both survive past the apply of @see_addr_2. -// CHECK-LABEL: sil [ossa] @rdar110676577 : {{.*}} { -// CHECK: [[STACK:%[^,]+]] = alloc_stack $M4 -// CHECK: [[GET_M4:%[^,]+]] = function_ref @get_M4 -// CHECK: [[M4:%[^,]+]] = apply [[GET_M4]]() : $@convention(thin) () -> @owned M4 -// CHECK: store [[M4]] to [init] [[STACK]] : $*M4 -// CHECK: [[M4_S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s2 -// CHECK: [[M4_S4_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4 -// CHECK: [[SEE_ADDR_2:%[^,]+]] = function_ref @see_addr_2 -// CHECK: apply [[SEE_ADDR_2]]([[M4_S2_ADDR]], [[M4_S4_ADDR]]) -// CHECK: [[M4_S1_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s1 -// CHECK: [[M4_S1:%[^,]+]] = load [take] [[M4_S1_ADDR]] : $*M -// CHECK: [[M4_S3_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s3 -// CHECK: [[M4_S3:%[^,]+]] = load [take] [[M4_S3_ADDR]] : $*M -// CHECK: [[END_2:%[^,]+]] = function_ref @end_2 -// CHECK: apply [[END_2]]([[M4_S1]], [[M4_S3]]) -// CHECK: [[M4_S4_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4 -// CHECK: destroy_addr [[M4_S4_ADDR_2]] -// CHECK: [[M4_S2_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s2 -// CHECK: destroy_addr [[M4_S2_ADDR_2]] -// CHECK-LABEL: } // end sil function 'rdar110676577' -sil [ossa] @rdar110676577 : $@convention(thin) () -> () { -bb0: - %0 = alloc_stack $M4 - %1 = mark_must_check [consumable_and_assignable] %0 : $*M4 - %3 = function_ref @get_M4 : $@convention(thin) () -> @owned M4 - %4 = apply %3() : $@convention(thin) () -> @owned M4 - store %4 to [init] %1 : $*M4 - %6 = struct_element_addr %1 : $*M4, #M4.s2 - %6a = struct_element_addr %1 : $*M4, #M4.s4 - %see_addr_2 = function_ref @see_addr_2 : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> () - apply %see_addr_2(%6, %6a) : $@convention(thin) (@in_guaranteed M, 
@in_guaranteed M) -> () - %12 = struct_element_addr %1 : $*M4, #M4.s1 - %13 = load [copy] %12 : $*M - %14 = struct_element_addr %1 : $*M4, #M4.s3 - %15 = load [copy] %14 : $*M - %16 = function_ref @end_2 : $@convention(thin) (@owned M, @owned M) -> () - %17 = apply %16(%13, %15) : $@convention(thin) (@owned M, @owned M) -> () - destroy_addr %1 : $*M4 - dealloc_stack %0 : $*M4 - %22 = tuple () - return %22 : $() -} diff --git a/test/SILOptimizer/moveonly_addresschecker_unmaximized.sil b/test/SILOptimizer/moveonly_addresschecker_unmaximized.sil new file mode 100644 index 0000000000000..530828ac39ce0 --- /dev/null +++ b/test/SILOptimizer/moveonly_addresschecker_unmaximized.sil @@ -0,0 +1,67 @@ +// RUN: %target-sil-opt -module-name moveonly_addresschecker -sil-move-only-address-checker -enable-experimental-feature MoveOnlyClasses -enable-sil-verify-all %s -move-only-address-checker-disable-lifetime-extension=true | %FileCheck %s + +@_moveOnly +struct M { + deinit {} +} + +@_moveOnly +struct M4 { + let s1: M + let s2: M + let s3: M + let s4: M +} + +sil @get_M4 : $@convention(thin) () -> @owned M4 +sil @end_2 : $@convention(thin) (@owned M, @owned M) -> () +sil @see_addr_2 : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> () + + +/// Two non-contiguous fields (#M4.s2, #M4.s4) are borrowed by @see_addr_2. +/// Two non-contiguous fields (#M4.s1, #M$.s3) are consumed by @end_2. +/// +/// Verify that #M4.s2 and #M4.s4 both survive past the apply of @see_addr_2. 
+// CHECK-LABEL: sil [ossa] @rdar110676577 : {{.*}} { +// CHECK: [[STACK:%[^,]+]] = alloc_stack $M4 +// CHECK: [[GET_M4:%[^,]+]] = function_ref @get_M4 +// CHECK: [[M4:%[^,]+]] = apply [[GET_M4]]() : $@convention(thin) () -> @owned M4 +// CHECK: store [[M4]] to [init] [[STACK]] : $*M4 +// CHECK: [[M4_S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s2 +// CHECK: [[M4_S4_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4 +// CHECK: [[SEE_ADDR_2:%[^,]+]] = function_ref @see_addr_2 +// CHECK: apply [[SEE_ADDR_2]]([[M4_S2_ADDR]], [[M4_S4_ADDR]]) +// CHECK: [[M4_S4_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4 +// CHECK: destroy_addr [[M4_S4_ADDR_2]] +// CHECK: [[M4_S2_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s2 +// CHECK: destroy_addr [[M4_S2_ADDR_2]] +// CHECK: [[M4_S1_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s1 +// CHECK: [[M4_S1:%[^,]+]] = load [take] [[M4_S1_ADDR]] : $*M +// CHECK: [[M4_S3_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s3 +// CHECK: [[M4_S3:%[^,]+]] = load [take] [[M4_S3_ADDR]] : $*M +// CHECK: [[END_2:%[^,]+]] = function_ref @end_2 +// CHECK: apply [[END_2]]([[M4_S1]], [[M4_S3]]) +// CHECK-LABEL: } // end sil function 'rdar110676577' +sil [ossa] @rdar110676577 : $@convention(thin) () -> () { +bb0: + %0 = alloc_stack $M4 + %1 = mark_must_check [consumable_and_assignable] %0 : $*M4 + %3 = function_ref @get_M4 : $@convention(thin) () -> @owned M4 + %4 = apply %3() : $@convention(thin) () -> @owned M4 + store %4 to [init] %1 : $*M4 + %6 = struct_element_addr %1 : $*M4, #M4.s2 + %6a = struct_element_addr %1 : $*M4, #M4.s4 + %see_addr_2 = function_ref @see_addr_2 : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> () + apply %see_addr_2(%6, %6a) : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> () + %12 = struct_element_addr %1 : $*M4, #M4.s1 + %13 = load [copy] %12 : $*M + %14 = struct_element_addr %1 : $*M4, #M4.s3 + %15 = load [copy] %14 : $*M + 
%16 = function_ref @end_2 : $@convention(thin) (@owned M, @owned M) -> () + %17 = apply %16(%13, %15) : $@convention(thin) (@owned M, @owned M) -> () + destroy_addr %1 : $*M4 + dealloc_stack %0 : $*M4 + %22 = tuple () + return %22 : $() +} +