@@ -737,6 +737,13 @@ static void emitAtomicOp(CIRGenFunction &CGF, AtomicExpr *E, Address Dest,
     fetchAttr = cir::AtomicFetchKindAttr::get(builder.getContext(),
                                               cir::AtomicFetchKind::Nand);
     break;
+  case AtomicExpr::AO__atomic_test_and_set: {
+    llvm_unreachable("NYI");
+  }
+
+  case AtomicExpr::AO__atomic_clear: {
+    llvm_unreachable("NYI");
+  }
   }
 
   assert(Op.size() && "expected operation name to build");
@@ -854,6 +861,8 @@ RValue CIRGenFunction::emitAtomicExpr(AtomicExpr *E) {
   case AtomicExpr::AO__c11_atomic_load:
   case AtomicExpr::AO__opencl_atomic_load:
   case AtomicExpr::AO__hip_atomic_load:
+  case AtomicExpr::AO__atomic_test_and_set:
+  case AtomicExpr::AO__atomic_clear:
     break;
 
   case AtomicExpr::AO__atomic_load:
@@ -1144,6 +1153,8 @@ RValue CIRGenFunction::emitAtomicExpr(AtomicExpr *E) {
   case AtomicExpr::AO__opencl_atomic_fetch_max:
   case AtomicExpr::AO__scoped_atomic_fetch_max:
   case AtomicExpr::AO__scoped_atomic_max_fetch:
+  case AtomicExpr::AO__atomic_test_and_set:
+  case AtomicExpr::AO__atomic_clear:
     llvm_unreachable("Integral atomic operations always become atomicrmw!");
   }
 
@@ -1175,22 +1186,21 @@ RValue CIRGenFunction::emitAtomicExpr(AtomicExpr *E) {
     llvm_unreachable("NYI");
   }
 
-  [[maybe_unused]] bool IsStore =
-      E->getOp() == AtomicExpr::AO__c11_atomic_store ||
-      E->getOp() == AtomicExpr::AO__opencl_atomic_store ||
-      E->getOp() == AtomicExpr::AO__hip_atomic_store ||
-      E->getOp() == AtomicExpr::AO__atomic_store ||
-      E->getOp() == AtomicExpr::AO__atomic_store_n ||
-      E->getOp() == AtomicExpr::AO__scoped_atomic_store ||
-      E->getOp() == AtomicExpr::AO__scoped_atomic_store_n;
-  [[maybe_unused]] bool IsLoad =
-      E->getOp() == AtomicExpr::AO__c11_atomic_load ||
-      E->getOp() == AtomicExpr::AO__opencl_atomic_load ||
-      E->getOp() == AtomicExpr::AO__hip_atomic_load ||
-      E->getOp() == AtomicExpr::AO__atomic_load ||
-      E->getOp() == AtomicExpr::AO__atomic_load_n ||
-      E->getOp() == AtomicExpr::AO__scoped_atomic_load ||
-      E->getOp() == AtomicExpr::AO__scoped_atomic_load_n;
+  bool IsStore = E->getOp() == AtomicExpr::AO__c11_atomic_store ||
+                 E->getOp() == AtomicExpr::AO__opencl_atomic_store ||
+                 E->getOp() == AtomicExpr::AO__hip_atomic_store ||
+                 E->getOp() == AtomicExpr::AO__atomic_store ||
+                 E->getOp() == AtomicExpr::AO__atomic_store_n ||
+                 E->getOp() == AtomicExpr::AO__scoped_atomic_store ||
+                 E->getOp() == AtomicExpr::AO__scoped_atomic_store_n ||
+                 E->getOp() == AtomicExpr::AO__atomic_clear;
+  bool IsLoad = E->getOp() == AtomicExpr::AO__c11_atomic_load ||
+                E->getOp() == AtomicExpr::AO__opencl_atomic_load ||
+                E->getOp() == AtomicExpr::AO__hip_atomic_load ||
+                E->getOp() == AtomicExpr::AO__atomic_load ||
+                E->getOp() == AtomicExpr::AO__atomic_load_n ||
+                E->getOp() == AtomicExpr::AO__scoped_atomic_load ||
+                E->getOp() == AtomicExpr::AO__scoped_atomic_load_n;
 
   if (auto ordAttr = getConstOpIntAttr(Order)) {
     // We should not ever get to a case where the ordering isn't a valid CABI
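
For reference, a minimal C snippet (my own illustration, not part of this patch) that lowers to `AtomicExpr::AO__atomic_test_and_set` and `AO__atomic_clear`, and would therefore currently reach the `llvm_unreachable("NYI")` paths added above when compiled through ClangIR:

```c
#include <stdbool.h>

// Illustrative only: a spinlock-style flag built on the GCC/Clang
// builtins that map to AO__atomic_test_and_set and AO__atomic_clear.
static volatile _Bool flag;

bool try_lock(void) {
  // Atomically sets the flag and returns its previous value.
  return __atomic_test_and_set(&flag, __ATOMIC_ACQUIRE);
}

void unlock(void) {
  // Atomically clears the flag (stores false).
  __atomic_clear(&flag, __ATOMIC_RELEASE);
}
```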