[MoveOnlyAddressChecker] Fix representation for initialized fields. #66955

Merged 6 commits on Jun 28, 2023
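
In brief: the checker previously described the fields initialized or reinitialized by an instruction with a single contiguous TypeTreeLeafTypeRange, which cannot represent a call that initializes non-contiguous fields (for example #M4.s2 and #M4.s4 in the new tests). The affected maps now carry one bit vector per instruction. A minimal standalone sketch of the representational difference, using plain standard-library stand-ins rather than the compiler's types:

#include <bitset>
#include <cstdio>
#include <string>

int main() {
  // Four leaf fields of an aggregate like M4: s1, s2, s3, s4 (indices 0..3).
  // A single half-open [start, end) range can only name contiguous fields.
  unsigned rangeStart = 1, rangeEnd = 2;        // covers s2 only

  // A bit vector can name any subset, e.g. s2 and s4 together.
  std::bitset<4> initializedBits;
  initializedBits.set(1);                       // s2
  initializedBits.set(3);                       // s4

  std::printf("range covers [%u, %u)\n", rangeStart, rangeEnd);
  std::printf("bits cover %s\n", initializedBits.to_string().c_str());
  return 0;
}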
9 changes: 9 additions & 0 deletions include/swift/SIL/FieldSensitivePrunedLiveness.h
@@ -321,6 +321,10 @@ struct TypeTreeLeafTypeRange {
SmallVectorImpl<std::pair<SILValue, TypeTreeLeafTypeRange>>
&resultingProjections);

static void visitContiguousRanges(
SmallBitVector const &bits,
llvm::function_ref<void(TypeTreeLeafTypeRange)> callback);

bool operator==(const TypeTreeLeafTypeRange &other) const {
return startEltOffset == other.startEltOffset &&
endEltOffset == other.endEltOffset;
@@ -1217,6 +1221,11 @@ class FieldSensitiveMultiDefPrunedLiveRange
defBlocks.setFrozen();
}

void initializeDef(SILInstruction *def, SmallBitVector const &bits) {
TypeTreeLeafTypeRange::visitContiguousRanges(
bits, [&](auto range) { initializeDef(def, range); });
}

void initializeDef(SILValue def, TypeTreeLeafTypeRange span) {
assert(Super::isInitialized());
defs.insert(def, span);
23 changes: 23 additions & 0 deletions lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp
@@ -474,6 +474,29 @@ void TypeTreeLeafTypeRange::constructProjectionsForNeededElements(
}
}

void TypeTreeLeafTypeRange::visitContiguousRanges(
SmallBitVector const &bits,
llvm::function_ref<void(TypeTreeLeafTypeRange)> callback) {
if (bits.size() == 0)
return;

llvm::Optional<unsigned> current = llvm::None;
for (unsigned bit = 0, size = bits.size(); bit < size; ++bit) {
auto isSet = bits.test(bit);
if (current) {
if (!isSet) {
callback(TypeTreeLeafTypeRange(*current, bit));
current = llvm::None;
}
} else if (isSet) {
current = bit;
}
}
if (current) {
callback(TypeTreeLeafTypeRange(*current, bits.size()));
}
}

//===----------------------------------------------------------------------===//
// MARK: FieldSensitivePrunedLiveBlocks
//===----------------------------------------------------------------------===//
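
For reference, a standalone sketch of the grouping that visitContiguousRanges performs, written against plain standard-library types; the real helper operates on llvm::SmallBitVector and hands each run to the callback as a TypeTreeLeafTypeRange:

#include <cstdio>
#include <functional>
#include <optional>
#include <vector>

// Invoke callback(start, end) for every maximal run of set bits, where the
// run covers the half-open index range [start, end).
static void visitContiguousRanges(
    const std::vector<bool> &bits,
    const std::function<void(unsigned, unsigned)> &callback) {
  std::optional<unsigned> current;
  for (unsigned bit = 0, size = bits.size(); bit < size; ++bit) {
    bool isSet = bits[bit];
    if (current) {
      if (!isSet) {
        callback(*current, bit);
        current.reset();
      }
    } else if (isSet) {
      current = bit;
    }
  }
  if (current)
    callback(*current, bits.size());
}

int main() {
  // Bits for fields {s1, s2, s4} of a four-field aggregate: 1 1 0 1.
  std::vector<bool> bits = {true, true, false, true};
  visitContiguousRanges(bits, [](unsigned start, unsigned end) {
    std::printf("[%u, %u)\n", start, end);      // prints [0, 2) then [3, 4)
  });
  return 0;
}

The new FieldSensitiveMultiDefPrunedLiveRange::initializeDef overload above uses exactly this grouping to forward each contiguous run to the existing range-based initializeDef.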
86 changes: 55 additions & 31 deletions lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp
@@ -563,6 +563,9 @@ namespace {
struct UseState {
MarkMustCheckInst *address;

using InstToBitMap =
llvm::SmallMapVector<SILInstruction *, SmallBitVector, 4>;

llvm::Optional<unsigned> cachedNumSubelements;

/// The blocks that consume fields of the value.
@@ -576,7 +579,7 @@ struct UseState {

/// A map from a liveness requiring use to the part of the type that it
/// requires liveness for.
llvm::SmallMapVector<SILInstruction *, SmallBitVector, 4> livenessUses;
InstToBitMap livenessUses;

/// A map from a load [copy] or load [take] that we determined must be
/// converted to a load_borrow to the part of the type tree that it needs to
@@ -622,11 +625,11 @@ struct UseState {

/// A map from an instruction that initializes memory to the description of
/// the part of the type tree that it initializes.
llvm::SmallMapVector<SILInstruction *, TypeTreeLeafTypeRange, 4> initInsts;
InstToBitMap initInsts;

/// memInstMustReinitialize insts. Contains both insts like copy_addr/store
/// [assign] that are reinits that we will convert to inits and true reinits.
llvm::SmallMapVector<SILInstruction *, TypeTreeLeafTypeRange, 4> reinitInsts;
InstToBitMap reinitInsts;

/// The set of drop_deinits of this mark_must_check
SmallSetVector<SILInstruction *, 2> dropDeinitInsts;
@@ -653,24 +656,39 @@ struct UseState {
return *cachedNumSubelements;
}

SmallBitVector &getOrCreateLivenessUse(SILInstruction *inst) {
auto iter = livenessUses.find(inst);
if (iter == livenessUses.end()) {
iter = livenessUses.insert({inst, SmallBitVector(getNumSubelements())})
.first;
SmallBitVector &getOrCreateAffectedBits(SILInstruction *inst,
InstToBitMap &map) {
auto iter = map.find(inst);
if (iter == map.end()) {
iter = map.insert({inst, SmallBitVector(getNumSubelements())}).first;
}
return iter->second;
}

void setAffectedBits(SILInstruction *inst, SmallBitVector const &bits,
InstToBitMap &map) {
getOrCreateAffectedBits(inst, map) |= bits;
}

void setAffectedBits(SILInstruction *inst, TypeTreeLeafTypeRange range,
InstToBitMap &map) {
range.setBits(getOrCreateAffectedBits(inst, map));
}

void recordLivenessUse(SILInstruction *inst, SmallBitVector const &bits) {
getOrCreateLivenessUse(inst) |= bits;
setAffectedBits(inst, bits, livenessUses);
}

void recordLivenessUse(SILInstruction *inst, TypeTreeLeafTypeRange range) {
auto &bits = getOrCreateLivenessUse(inst);
for (auto element : range.getRange()) {
bits.set(element);
}
setAffectedBits(inst, range, livenessUses);
}

void recordReinitUse(SILInstruction *inst, TypeTreeLeafTypeRange range) {
setAffectedBits(inst, range, reinitInsts);
}

void recordInitUse(SILInstruction *inst, TypeTreeLeafTypeRange range) {
setAffectedBits(inst, range, initInsts);
}

/// Returns true if this is a terminator instruction that although it doesn't
@@ -765,14 +783,24 @@ struct UseState {
}
}

void recordConsumingBlock(SILBasicBlock *block, TypeTreeLeafTypeRange range) {
SmallBitVector &getOrCreateConsumingBlock(SILBasicBlock *block) {
auto iter = consumingBlocks.find(block);
if (iter == consumingBlocks.end()) {
iter =
consumingBlocks.insert({block, SmallBitVector(getNumSubelements())})
.first;
}
range.setBits(iter->second);
return iter->second;
}

void recordConsumingBlock(SILBasicBlock *block, TypeTreeLeafTypeRange range) {
auto &consumingBits = getOrCreateConsumingBlock(block);
range.setBits(consumingBits);
}

void recordConsumingBlock(SILBasicBlock *block, SmallBitVector &bits) {
auto &consumingBits = getOrCreateConsumingBlock(block);
consumingBits |= bits;
}

void
@@ -839,7 +867,7 @@ struct UseState {
if (!isReinitToInitConvertibleInst(inst)) {
auto iter = reinitInsts.find(inst);
if (iter != reinitInsts.end()) {
if (span.setIntersection(iter->second))
if (span.intersects(iter->second))
return true;
}
}
@@ -857,14 +885,14 @@
{
auto iter = initInsts.find(inst);
if (iter != initInsts.end()) {
if (span.setIntersection(iter->second))
if (span.intersects(iter->second))
return true;
}
}
if (isReinitToInitConvertibleInst(inst)) {
auto iter = reinitInsts.find(inst);
if (iter != reinitInsts.end()) {
if (span.setIntersection(iter->second))
if (span.intersects(iter->second))
return true;
}
}
@@ -1002,7 +1030,7 @@ void UseState::initializeLiveness(
llvm::dbgs()
<< "Found in/in_guaranteed/inout/inout_aliasable argument as "
"an init... adding mark_must_check as init!\n");
initInsts.insert({address, liveness.getTopLevelSpan()});
recordInitUse(address, liveness.getTopLevelSpan());
liveness.initializeDef(address, liveness.getTopLevelSpan());
break;
case swift::SILArgumentConvention::Indirect_Out:
@@ -1033,14 +1061,14 @@ void UseState::initializeLiveness(
// later invariants that we assert upon remain true.
LLVM_DEBUG(llvm::dbgs() << "Found move only arg closure box use... "
"adding mark_must_check as init!\n");
initInsts.insert({address, liveness.getTopLevelSpan()});
recordInitUse(address, liveness.getTopLevelSpan());
liveness.initializeDef(address, liveness.getTopLevelSpan());
}
} else if (auto *box = dyn_cast<AllocBoxInst>(
lookThroughOwnershipInsts(projectBox->getOperand()))) {
LLVM_DEBUG(llvm::dbgs() << "Found move only var allocbox use... "
"adding mark_must_check as init!\n");
initInsts.insert({address, liveness.getTopLevelSpan()});
recordInitUse(address, liveness.getTopLevelSpan());
liveness.initializeDef(address, liveness.getTopLevelSpan());
}
}
@@ -1051,7 +1079,7 @@ void UseState::initializeLiveness(
stripAccessMarkers(address->getOperand()))) {
LLVM_DEBUG(llvm::dbgs() << "Found ref_element_addr use... "
"adding mark_must_check as init!\n");
initInsts.insert({address, liveness.getTopLevelSpan()});
recordInitUse(address, liveness.getTopLevelSpan());
liveness.initializeDef(address, liveness.getTopLevelSpan());
}

@@ -1061,7 +1089,7 @@ void UseState::initializeLiveness(
dyn_cast<GlobalAddrInst>(stripAccessMarkers(address->getOperand()))) {
LLVM_DEBUG(llvm::dbgs() << "Found global_addr use... "
"adding mark_must_check as init!\n");
initInsts.insert({address, liveness.getTopLevelSpan()});
recordInitUse(address, liveness.getTopLevelSpan());
liveness.initializeDef(address, liveness.getTopLevelSpan());
}

@@ -1734,18 +1762,16 @@ bool GatherUsesVisitor::visitUse(Operand *op) {
if (!leafRange)
return false;

assert(!useState.initInsts.count(user));
useState.initInsts.insert({user, *leafRange});
useState.recordInitUse(user, *leafRange);
return true;
}

if (::memInstMustReinitialize(op)) {
LLVM_DEBUG(llvm::dbgs() << "Found reinit: " << *user);
assert(!useState.reinitInsts.count(user));
auto leafRange = TypeTreeLeafTypeRange::get(op->get(), getRootAddress());
if (!leafRange)
return false;
useState.reinitInsts.insert({user, *leafRange});
useState.recordReinitUse(user, *leafRange);
return true;
}

@@ -2730,9 +2756,7 @@ void MoveOnlyAddressCheckerPImpl::rewriteUses(
for (auto reinitPair : addressUseState.reinitInsts) {
if (!isReinitToInitConvertibleInst(reinitPair.first))
continue;
SmallBitVector bits(liveness.getNumSubElements());
reinitPair.second.setBits(bits);
if (!consumes.claimConsume(reinitPair.first, bits))
if (!consumes.claimConsume(reinitPair.first, reinitPair.second))
convertMemoryReinitToInitForm(reinitPair.first, debugVar);
}

@@ -2923,7 +2947,7 @@ void ExtendUnconsumedLiveness::run() {
}
}
for (auto pair : addressUseState.reinitInsts) {
if (pair.second.contains(element)) {
if (pair.second.test(element)) {
destroys[pair.first] = DestroyKind::Reinit;
}
}
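
The UseState changes above replace the single-range maps with InstToBitMap so that repeated records against the same instruction accumulate by OR; the old code asserted that an instruction appeared at most once in initInsts/reinitInsts, so a second, non-contiguous range for the same instruction could not be recorded. A standalone sketch of that accumulation pattern, again with illustrative stand-in types only:

#include <bitset>
#include <cstdio>
#include <map>
#include <string>

using InstId = int;                  // stand-in for SILInstruction *
using Bits = std::bitset<4>;         // stand-in for SmallBitVector
using InstToBitMap = std::map<InstId, Bits>;

static Bits &getOrCreateAffectedBits(InstId inst, InstToBitMap &map) {
  return map[inst];                  // default-constructs an empty bit set
}

static void setAffectedBits(InstId inst, const Bits &bits, InstToBitMap &map) {
  getOrCreateAffectedBits(inst, map) |= bits;
}

int main() {
  InstToBitMap initInsts;
  InstId apply = 42;                 // one apply initializing #M4.s2 and #M4.s4
  setAffectedBits(apply, Bits{}.set(1), initInsts);            // s2
  setAffectedBits(apply, Bits{}.set(3), initInsts);            // s4
  std::printf("%s\n", initInsts[apply].to_string().c_str());   // prints 1010
  return 0;
}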
79 changes: 78 additions & 1 deletion test/SILOptimizer/moveonly_addresschecker_unmaximized.sil
@@ -16,7 +16,8 @@ struct M4 {
sil @get_M4 : $@convention(thin) () -> @owned M4
sil @end_2 : $@convention(thin) (@owned M, @owned M) -> ()
sil @see_addr_2 : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> ()

sil @replace_2 : $@convention(thin) (@inout M, @inout M) -> ()
sil @get_out_2 : $@convention(thin) () -> (@out M, @out M)

/// Two non-contiguous fields (#M4.s2, #M4.s4) are borrowed by @see_addr_2.
/// Two non-contiguous fields (#M4.s1, #M4.s3) are consumed by @end_2.
@@ -65,3 +66,79 @@ bb0:
return %22 : $()
}

// CHECK-LABEL: sil [ossa] @rdar111356251 : $@convention(thin) () -> () {
// CHECK: [[STACK:%[^,]+]] = alloc_stack $M4
// CHECK: [[GET_M4:%[^,]+]] = function_ref @get_M4 : $@convention(thin) () -> @owned M4
// CHECK: [[INSTANCE:%[^,]+]] = apply [[GET_M4]]() : $@convention(thin) () -> @owned M4
// CHECK: store [[INSTANCE]] to [init] [[STACK]] : $*M4
// CHECK: [[S2_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s2
// CHECK: [[S4_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4
// CHECK: [[REPLACE_2:%[^,]+]] = function_ref @replace_2 : $@convention(thin) (@inout M, @inout M) -> ()
// CHECK: apply [[REPLACE_2]]([[S2_ADDR]], [[S4_ADDR]]) : $@convention(thin) (@inout M, @inout M) -> ()
// CHECK: [[S4_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s4
// CHECK: destroy_addr [[S4_ADDR_2]] : $*M
// CHECK: [[S2_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s2
// CHECK: destroy_addr [[S2_ADDR_2]] : $*M
// CHECK: [[S1_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s1
// CHECK: [[S1:%[^,]+]] = load [take] [[S1_ADDR]] : $*M
// CHECK: [[S3_ADDR:%[^,]+]] = struct_element_addr [[STACK]] : $*M4, #M4.s3
// CHECK: [[S3:%[^,]+]] = load [take] [[S3_ADDR]] : $*M
// CHECK: [[END_2:%[^,]+]] = function_ref @end_2 : $@convention(thin) (@owned M, @owned M) -> ()
// CHECK: apply [[END_2]]([[S1]], [[S3]]) : $@convention(thin) (@owned M, @owned M) -> ()
// CHECK-LABEL: } // end sil function 'rdar111356251'
sil [ossa] @rdar111356251 : $@convention(thin) () -> () {
bb0:
%stack_addr = alloc_stack $M4
%stack = mark_must_check [consumable_and_assignable] %stack_addr : $*M4
%get_M4 = function_ref @get_M4 : $@convention(thin) () -> @owned M4
%instance = apply %get_M4() : $@convention(thin) () -> @owned M4
store %instance to [init] %stack : $*M4
%s2_addr = struct_element_addr %stack : $*M4, #M4.s2
%s4_addr = struct_element_addr %stack : $*M4, #M4.s4
%replace_2 = function_ref @replace_2 : $@convention(thin) (@inout M, @inout M) -> ()
apply %replace_2(%s2_addr, %s4_addr) : $@convention(thin) (@inout M, @inout M) -> ()
%12 = struct_element_addr %stack : $*M4, #M4.s1
%13 = load [copy] %12 : $*M
%14 = struct_element_addr %stack : $*M4, #M4.s3
%15 = load [copy] %14 : $*M
%16 = function_ref @end_2 : $@convention(thin) (@owned M, @owned M) -> ()
%17 = apply %16(%13, %15) : $@convention(thin) (@owned M, @owned M) -> ()
destroy_addr %stack : $*M4
dealloc_stack %stack_addr : $*M4
%22 = tuple ()
return %22 : $()
}

// Verify that M4.s4 is not leaked after it is set.
// CHECK-LABEL: sil [ossa] @rdar111391893 : $@convention(thin) () -> () {
// CHECK: [[GET_OUT_2:%[^,]+]] = function_ref @get_out_2
// CHECK: [[STACK:%[^,]+]] = alloc_stack
// CHECK: [[S2_ADDR_1:%[^,]+]] = struct_element_addr [[STACK]]
// CHECK: [[S4_ADDR_1:%[^,]+]] = struct_element_addr [[STACK]]
// CHECK: apply [[GET_OUT_2]]([[S2_ADDR_1]], [[S4_ADDR_1]])
// CHECK: [[S2_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]]
// CHECK: [[S4_ADDR_4:%[^,]+]] = struct_element_addr [[STACK]]
// CHECK: destroy_addr [[S2_ADDR_2]]
// CHECK: destroy_addr [[S4_ADDR_4]]
// CHECK-LABEL: } // end sil function 'rdar111391893'
sil [ossa] @rdar111391893 : $@convention(thin) () -> () {
%get_out_2 = function_ref @get_out_2 : $@convention(thin) () -> (@out M, @out M)
%end_2 = function_ref @end_2 : $@convention(thin) (@owned M, @owned M) -> ()
%stack_addr = alloc_stack $M4
%stack = mark_must_check [consumable_and_assignable] %stack_addr : $*M4
%s2_addr = struct_element_addr %stack : $*M4, #M4.s2
%s4_addr = struct_element_addr %stack : $*M4, #M4.s4
apply %get_out_2(%s2_addr, %s4_addr) : $@convention(thin) () -> (@out M, @out M)
%s1_addr = struct_element_addr %stack : $*M4, #M4.s1
%s3_addr = struct_element_addr %stack : $*M4, #M4.s3
apply %get_out_2(%s1_addr, %s3_addr) : $@convention(thin) () -> (@out M, @out M)
%12 = struct_element_addr %stack : $*M4, #M4.s1
%13 = load [copy] %12 : $*M
%14 = struct_element_addr %stack : $*M4, #M4.s3
%15 = load [copy] %14 : $*M
%17 = apply %end_2(%13, %15) : $@convention(thin) (@owned M, @owned M) -> ()
destroy_addr %stack : $*M4
dealloc_stack %stack_addr : $*M4
%22 = tuple ()
return %22 : $()
}