@@ -5099,6 +5099,147 @@ RValue CodeGenFunction::EmitBuiltinExpr(const GlobalDecl GD, unsigned BuiltinID,
5099
5099
ReturnValueSlot(), Args);
5100
5100
}
5101
5101
5102
  case Builtin::BI__atomic_test_and_set: {
    // Lower __atomic_test_and_set(ptr, order): atomically exchange the byte
    // at ptr with 1 and return whether the byte was already non-zero.
    //
    // Look at the argument type to determine whether this is a volatile
    // operation. The parameter type is always volatile.
    QualType PtrTy = E->getArg(0)->IgnoreImpCasts()->getType();
    bool Volatile =
        PtrTy->castAs<PointerType>()->getPointeeType().isVolatileQualified();

    // The builtin always operates on a single byte, regardless of the
    // declared pointee type, so force an i8 element type on the address.
    Address Ptr =
        EmitPointerWithAlignment(E->getArg(0)).withElementType(Int8Ty);

    Value *NewVal = Builder.getInt8(1);
    Value *Order = EmitScalarExpr(E->getArg(1));
    if (isa<llvm::ConstantInt>(Order)) {
      // Constant memory order: emit a single xchg with the matching LLVM
      // atomic ordering.
      int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
      AtomicRMWInst *Result = nullptr;
      switch (ord) {
      case 0: // memory_order_relaxed
      default: // invalid order
        // Out-of-range order values are treated as relaxed.
        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
                                         llvm::AtomicOrdering::Monotonic);
        break;
      case 1: // memory_order_consume
      case 2: // memory_order_acquire
        // consume is strengthened to acquire.
        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
                                         llvm::AtomicOrdering::Acquire);
        break;
      case 3: // memory_order_release
        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
                                         llvm::AtomicOrdering::Release);
        break;
      case 4: // memory_order_acq_rel

        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
                                         llvm::AtomicOrdering::AcquireRelease);
        break;
      case 5: // memory_order_seq_cst
        Result = Builder.CreateAtomicRMW(
            llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
            llvm::AtomicOrdering::SequentiallyConsistent);
        break;
      }
      Result->setVolatile(Volatile);
      // The builtin returns bool: true iff the previous byte was non-zero.
      return RValue::get(Builder.CreateIsNotNull(Result, "tobool"));
    }

    // Non-constant memory order: dispatch at runtime over the five distinct
    // LLVM orderings via a switch, with one xchg per ordering block.
    llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);

    llvm::BasicBlock *BBs[5] = {
        createBasicBlock("monotonic", CurFn),
        createBasicBlock("acquire", CurFn),
        createBasicBlock("release", CurFn),
        createBasicBlock("acqrel", CurFn),
        createBasicBlock("seqcst", CurFn)
    };
    // Orders[i] is the ordering used inside BBs[i].
    llvm::AtomicOrdering Orders[5] = {
        llvm::AtomicOrdering::Monotonic, llvm::AtomicOrdering::Acquire,
        llvm::AtomicOrdering::Release, llvm::AtomicOrdering::AcquireRelease,
        llvm::AtomicOrdering::SequentiallyConsistent};

    // Unrecognized order values fall through to the monotonic block
    // (switch default), matching the constant-order path above.
    Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
    llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);

    // Create the result PHI up front so each ordering block can register its
    // incoming value as it is emitted.
    Builder.SetInsertPoint(ContBB);
    PHINode *Result = Builder.CreatePHI(Int8Ty, 5, "was_set");

    for (unsigned i = 0; i < 5; ++i) {
      Builder.SetInsertPoint(BBs[i]);
      AtomicRMWInst *RMW = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
                                                   Ptr, NewVal, Orders[i]);
      RMW->setVolatile(Volatile);
      Result->addIncoming(RMW, BBs[i]);
      Builder.CreateBr(ContBB);
    }

    // Map memory_order values to blocks; consume (1) and acquire (2) share
    // the acquire block.
    SI->addCase(Builder.getInt32(0), BBs[0]);
    SI->addCase(Builder.getInt32(1), BBs[1]);
    SI->addCase(Builder.getInt32(2), BBs[1]);
    SI->addCase(Builder.getInt32(3), BBs[2]);
    SI->addCase(Builder.getInt32(4), BBs[3]);
    SI->addCase(Builder.getInt32(5), BBs[4]);

    Builder.SetInsertPoint(ContBB);
    return RValue::get(Builder.CreateIsNotNull(Result, "tobool"));
  }
5186
+
5187
  case Builtin::BI__atomic_clear: {
    // Lower __atomic_clear(ptr, order): atomically store 0 into the byte at
    // ptr with the requested memory ordering. Returns nothing.
    //
    // As with __atomic_test_and_set, the pointee's volatility decides whether
    // the store is volatile.
    QualType PtrTy = E->getArg(0)->IgnoreImpCasts()->getType();
    bool Volatile =
        PtrTy->castAs<PointerType>()->getPointeeType().isVolatileQualified();

    // Operate on a single i8 regardless of the declared pointee type.
    Address Ptr = EmitPointerWithAlignment(E->getArg(0));
    Ptr = Ptr.withElementType(Int8Ty);
    Value *NewVal = Builder.getInt8(0);
    Value *Order = EmitScalarExpr(E->getArg(1));
    if (isa<llvm::ConstantInt>(Order)) {
      // Constant memory order: emit one atomic store and stamp the ordering
      // on it. Only relaxed (0), release (3) and seq_cst (5) are accepted;
      // anything else is treated as relaxed.
      int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
      StoreInst *Store = Builder.CreateStore(NewVal, Ptr, Volatile);
      switch (ord) {
      case 0: // memory_order_relaxed
      default: // invalid order
        Store->setOrdering(llvm::AtomicOrdering::Monotonic);
        break;
      case 3: // memory_order_release
        Store->setOrdering(llvm::AtomicOrdering::Release);
        break;
      case 5: // memory_order_seq_cst
        Store->setOrdering(llvm::AtomicOrdering::SequentiallyConsistent);
        break;
      }
      // __atomic_clear produces no value.
      return RValue::get(nullptr);
    }

    // Non-constant memory order: dispatch at runtime over the three store
    // orderings, one block per ordering.
    llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);

    llvm::BasicBlock *BBs[3] = {
        createBasicBlock("monotonic", CurFn),
        createBasicBlock("release", CurFn),
        createBasicBlock("seqcst", CurFn)
    };
    // Orders[i] is the ordering used inside BBs[i].
    llvm::AtomicOrdering Orders[3] = {
        llvm::AtomicOrdering::Monotonic, llvm::AtomicOrdering::Release,
        llvm::AtomicOrdering::SequentiallyConsistent};

    // Unrecognized order values fall through to the monotonic block
    // (switch default), matching the constant-order path above.
    Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
    llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);

    for (unsigned i = 0; i < 3; ++i) {
      Builder.SetInsertPoint(BBs[i]);
      StoreInst *Store = Builder.CreateStore(NewVal, Ptr, Volatile);
      Store->setOrdering(Orders[i]);
      Builder.CreateBr(ContBB);
    }

    SI->addCase(Builder.getInt32(0), BBs[0]);
    SI->addCase(Builder.getInt32(3), BBs[1]);
    SI->addCase(Builder.getInt32(5), BBs[2]);

    Builder.SetInsertPoint(ContBB);
    return RValue::get(nullptr);
  }
5242
+
5102
5243
case Builtin::BI__atomic_thread_fence:
5103
5244
case Builtin::BI__atomic_signal_fence:
5104
5245
case Builtin::BI__c11_atomic_thread_fence:
0 commit comments