; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=aarch64--linux-gnu -mattr=+sve < %s | FileCheck %s

; Ensure we use the CC result of SVE compare instructions when branching.
define void @sve_cmplt_setcc(ptr %out, <vscale x 8 x i16> %in, <vscale x 8 x i1> %pg) {
; CHECK-LABEL: sve_cmplt_setcc:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    cmplt p1.h, p0/z, z0.h, #0
; CHECK-NEXT:    b.eq .LBB0_2
; CHECK-NEXT:  // %bb.1: // %if.then
; CHECK-NEXT:    st1h { z0.h }, p0, [x0]
; CHECK-NEXT:  .LBB0_2: // %if.end
; CHECK-NEXT:    ret
entry:
  %0 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.cmplt.wide.nxv8i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %in, <vscale x 2 x i64> zeroinitializer)
  %1 = tail call i1 @llvm.aarch64.sve.ptest.any.nxv8i1(<vscale x 8 x i1> %pg, <vscale x 8 x i1> %0)
  br i1 %1, label %if.then, label %if.end

if.then:
  tail call void @llvm.masked.store.nxv8i16.p0(<vscale x 8 x i16> %in, ptr %out, i32 2, <vscale x 8 x i1> %pg)
  br label %if.end

if.end:
  ret void
}

; Ensure we use the inverted CC result of SVE compare instructions when branching.
define void @sve_cmplt_setcc_inverted(ptr %out, <vscale x 8 x i16> %in, <vscale x 8 x i1> %pg) {
; CHECK-LABEL: sve_cmplt_setcc_inverted:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    cmplt p1.h, p0/z, z0.h, #0
; CHECK-NEXT:    b.ne .LBB1_2
; CHECK-NEXT:  // %bb.1: // %if.then
; CHECK-NEXT:    st1h { z0.h }, p0, [x0]
; CHECK-NEXT:  .LBB1_2: // %if.end
; CHECK-NEXT:    ret
entry:
  %0 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.cmplt.wide.nxv8i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %in, <vscale x 2 x i64> zeroinitializer)
  %1 = tail call i1 @llvm.aarch64.sve.ptest.any.nxv8i1(<vscale x 8 x i1> %pg, <vscale x 8 x i1> %0)
  br i1 %1, label %if.end, label %if.then

if.then:
  tail call void @llvm.masked.store.nxv8i16.p0(<vscale x 8 x i16> %in, ptr %out, i32 2, <vscale x 8 x i1> %pg)
  br label %if.end

if.end:
  ret void
}

; Ensure we combine setcc and csel so as to not end up with an extra compare.
define void @sve_cmplt_setcc_hslo(ptr %out, <vscale x 8 x i16> %in, <vscale x 8 x i1> %pg) {
; CHECK-LABEL: sve_cmplt_setcc_hslo:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    ptrue p1.h
; CHECK-NEXT:    cmplt p2.h, p0/z, z0.h, #0
; CHECK-NEXT:    and p1.b, p0/z, p0.b, p1.b
; CHECK-NEXT:    ptest p1, p2.b
; CHECK-NEXT:    b.hs .LBB2_2
; CHECK-NEXT:  // %bb.1: // %if.then
; CHECK-NEXT:    st1h { z0.h }, p0, [x0]
; CHECK-NEXT:  .LBB2_2: // %if.end
; CHECK-NEXT:    ret
entry:
  %0 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.cmplt.wide.nxv8i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %in, <vscale x 2 x i64> zeroinitializer)
  %1 = tail call i1 @llvm.aarch64.sve.ptest.last.nxv8i1(<vscale x 8 x i1> %pg, <vscale x 8 x i1> %0)
  br i1 %1, label %if.then, label %if.end

if.then:
  tail call void @llvm.masked.store.nxv8i16.p0(<vscale x 8 x i16> %in, ptr %out, i32 2, <vscale x 8 x i1> %pg)
  br label %if.end

if.end:
  ret void
}
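
; A note on the branch conditions checked above: SVE flag-setting predicate
; instructions (including ptest) set NZCV so that the SVE condition aliases
; map to none = eq, any = ne, nlast = hs and last = lo. Hence b.eq branches
; when no active lane matched, b.ne when some active lane matched, and b.hs
; when the last active lane did not match.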

; Fold away the redundant setcc:
;    setcc(ne, <all active>, sext(nxvNi1 ...), splat(0))
; -> nxvNi1 ...
define <vscale x 16 x i1> @sve_cmpne_setcc_all_true_sext(<vscale x 16 x i8> %vec, <vscale x 16 x i1> %pg) {
; CHECK-LABEL: sve_cmpne_setcc_all_true_sext:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ret
  %alltrue.ins = insertelement <vscale x 16 x i1> poison, i1 true, i32 0
  %alltrue = shufflevector <vscale x 16 x i1> %alltrue.ins, <vscale x 16 x i1> poison, <vscale x 16 x i32> zeroinitializer
  %pg.sext = sext <vscale x 16 x i1> %pg to <vscale x 16 x i8>
  %cmp2 = call <vscale x 16 x i1> @llvm.aarch64.sve.cmpne.nxv16i8(<vscale x 16 x i1> %alltrue, <vscale x 16 x i8> %pg.sext, <vscale x 16 x i8> zeroinitializer)
  ret <vscale x 16 x i1> %cmp2
}

; Fold away the redundant setcc:
;    setcc(ne, pred, sext(setcc(ne, pred, ..., splat(0))), splat(0))
; -> setcc(ne, pred, ..., splat(0))
define <vscale x 16 x i1> @sve_cmpne_setcc_equal_pred(<vscale x 16 x i8> %vec, <vscale x 16 x i1> %pg) {
; CHECK-LABEL: sve_cmpne_setcc_equal_pred:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpne p0.b, p0/z, z0.b, #0
; CHECK-NEXT:    ret
  %cmp1 = call <vscale x 16 x i1> @llvm.aarch64.sve.cmpne.nxv16i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %vec, <vscale x 16 x i8> zeroinitializer)
  %cmp1.sext = sext <vscale x 16 x i1> %cmp1 to <vscale x 16 x i8>
  %cmp2 = call <vscale x 16 x i1> @llvm.aarch64.sve.cmpne.nxv16i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %cmp1.sext, <vscale x 16 x i8> zeroinitializer)
  ret <vscale x 16 x i1> %cmp2
}

; Combine:
;    setcc(ne, pred1, sext(setcc(ne, pred2, ..., splat(0))), splat(0))
; -> setcc(ne, and(pred1, pred2), ..., splat(0))
define <vscale x 16 x i1> @sve_cmpne_setcc_different_pred(<vscale x 16 x i8> %vec, <vscale x 16 x i1> %pg1, <vscale x 16 x i1> %pg2) {
; CHECK-LABEL: sve_cmpne_setcc_different_pred:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpne p0.b, p0/z, z0.b, #0
; CHECK-NEXT:    and p0.b, p0/z, p0.b, p1.b
; CHECK-NEXT:    ret
  %cmp1 = call <vscale x 16 x i1> @llvm.aarch64.sve.cmpne.nxv16i8(<vscale x 16 x i1> %pg1, <vscale x 16 x i8> %vec, <vscale x 16 x i8> zeroinitializer)
  %cmp1.sext = sext <vscale x 16 x i1> %cmp1 to <vscale x 16 x i8>
  %cmp2 = call <vscale x 16 x i1> @llvm.aarch64.sve.cmpne.nxv16i8(<vscale x 16 x i1> %pg2, <vscale x 16 x i8> %cmp1.sext, <vscale x 16 x i8> zeroinitializer)
  ret <vscale x 16 x i1> %cmp2
}

declare <vscale x 16 x i1> @llvm.aarch64.sve.cmpne.nxv16i8(<vscale x 16 x i1>, <vscale x 16 x i8>, <vscale x 16 x i8>)

declare i1 @llvm.aarch64.sve.ptest.any.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>)
declare i1 @llvm.aarch64.sve.ptest.last.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>)

declare <vscale x 8 x i1> @llvm.aarch64.sve.cmplt.wide.nxv8i16(<vscale x 8 x i1>, <vscale x 8 x i16>, <vscale x 2 x i64>)

declare void @llvm.masked.store.nxv8i16.p0(<vscale x 8 x i16>, ptr, i32, <vscale x 8 x i1>)