; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve2 < %s | FileCheck %s

; The convert.to.svbool following each predicate-generating compare should be
; folded away, so each function compiles to just the compare instruction and a ret.

define <vscale x 16 x i1> @facgt_fun(<vscale x 2 x i1> %a, <vscale x 2 x double> %b, <vscale x 2 x double> %c) {
; CHECK-LABEL: facgt_fun:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    facgt p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
entry:
  %0 = tail call <vscale x 2 x i1> @llvm.aarch64.sve.facgt.nxv2f64(<vscale x 2 x i1> %a, <vscale x 2 x double> %b, <vscale x 2 x double> %c)
  %1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %0)
  ret <vscale x 16 x i1> %1
}

define <vscale x 16 x i1> @facge_fun(<vscale x 2 x i1> %a, <vscale x 2 x double> %b, <vscale x 2 x double> %c) {
; CHECK-LABEL: facge_fun:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    facge p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
entry:
  %0 = tail call <vscale x 2 x i1> @llvm.aarch64.sve.facge.nxv2f64(<vscale x 2 x i1> %a, <vscale x 2 x double> %b, <vscale x 2 x double> %c)
  %1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %0)
  ret <vscale x 16 x i1> %1
}

define <vscale x 16 x i1> @whilege_fun(i32 %a, i32 %b) {
; CHECK-LABEL: whilege_fun:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    whilege p0.d, w0, w1
; CHECK-NEXT:    ret
entry:
  %0 = call <vscale x 2 x i1> @llvm.aarch64.sve.whilege.nxv2i1.i32(i32 %a, i32 %b)
  %1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %0)
  ret <vscale x 16 x i1> %1
}

define <vscale x 16 x i1> @whilegt_fun(i32 %a, i32 %b) {
; CHECK-LABEL: whilegt_fun:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    whilegt p0.d, w0, w1
; CHECK-NEXT:    ret
entry:
  %0 = call <vscale x 2 x i1> @llvm.aarch64.sve.whilegt.nxv2i1.i32(i32 %a, i32 %b)
  %1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %0)
  ret <vscale x 16 x i1> %1
}

define <vscale x 16 x i1> @whilehi_fun(i32 %a, i32 %b) {
; CHECK-LABEL: whilehi_fun:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    whilehi p0.d, w0, w1
; CHECK-NEXT:    ret
entry:
  %0 = call <vscale x 2 x i1> @llvm.aarch64.sve.whilehi.nxv2i1.i32(i32 %a, i32 %b)
  %1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %0)
  ret <vscale x 16 x i1> %1
}

define <vscale x 16 x i1> @whilehs_fun(i32 %a, i32 %b) {
; CHECK-LABEL: whilehs_fun:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    whilehs p0.d, w0, w1
; CHECK-NEXT:    ret
entry:
  %0 = call <vscale x 2 x i1> @llvm.aarch64.sve.whilehs.nxv2i1.i32(i32 %a, i32 %b)
  %1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %0)
  ret <vscale x 16 x i1> %1
}

define <vscale x 16 x i1> @whilele_fun(i32 %a, i32 %b) {
; CHECK-LABEL: whilele_fun:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    whilele p0.d, w0, w1
; CHECK-NEXT:    ret
entry:
  %0 = call <vscale x 2 x i1> @llvm.aarch64.sve.whilele.nxv2i1.i32(i32 %a, i32 %b)
  %1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %0)
  ret <vscale x 16 x i1> %1
}

define <vscale x 16 x i1> @whilelo_fun(i32 %a, i32 %b) {
; CHECK-LABEL: whilelo_fun:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    whilelo p0.d, w0, w1
; CHECK-NEXT:    ret
entry:
  %0 = call <vscale x 2 x i1> @llvm.aarch64.sve.whilelo.nxv2i1.i32(i32 %a, i32 %b)
  %1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %0)
  ret <vscale x 16 x i1> %1
}

define <vscale x 16 x i1> @whilels_fun(i32 %a, i32 %b) {
; CHECK-LABEL: whilels_fun:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    whilels p0.d, w0, w1
; CHECK-NEXT:    ret
entry:
  %0 = call <vscale x 2 x i1> @llvm.aarch64.sve.whilels.nxv2i1.i32(i32 %a, i32 %b)
  %1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %0)
  ret <vscale x 16 x i1> %1
}

define <vscale x 16 x i1> @whilelt_fun(i32 %a, i32 %b) {
; CHECK-LABEL: whilelt_fun:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    whilelt p0.d, w0, w1
; CHECK-NEXT:    ret
entry:
  %0 = call <vscale x 2 x i1> @llvm.aarch64.sve.whilelt.nxv2i1.i32(i32 %a, i32 %b)
  %1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %0)
  ret <vscale x 16 x i1> %1
}

define <vscale x 16 x i1> @cmpeq_d_fun(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmpeq_d_fun:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpeq p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 2 x i1> @llvm.aarch64.sve.cmpeq.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmpeq_wide_s_fun(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmpeq_wide_s_fun:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpeq p0.s, p0/z, z0.s, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 4 x i1> @llvm.aarch64.sve.cmpeq.wide.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmpge_d(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmpge_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpge p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 2 x i1> @llvm.aarch64.sve.cmpge.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmpge_wide_s(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmpge_wide_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpge p0.s, p0/z, z0.s, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 4 x i1> @llvm.aarch64.sve.cmpge.wide.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmpgt_d(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmpgt_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpgt p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 2 x i1> @llvm.aarch64.sve.cmpgt.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmpgt_wide_s(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmpgt_wide_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpgt p0.s, p0/z, z0.s, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 4 x i1> @llvm.aarch64.sve.cmpgt.wide.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmphi_d(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmphi_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmphi p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 2 x i1> @llvm.aarch64.sve.cmphi.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmphi_wide_s(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmphi_wide_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmphi p0.s, p0/z, z0.s, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 4 x i1> @llvm.aarch64.sve.cmphi.wide.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmphs_d(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmphs_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmphs p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 2 x i1> @llvm.aarch64.sve.cmphs.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmphs_wide_s(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmphs_wide_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmphs p0.s, p0/z, z0.s, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 4 x i1> @llvm.aarch64.sve.cmphs.wide.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmple_wide_s(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmple_wide_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmple p0.s, p0/z, z0.s, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 4 x i1> @llvm.aarch64.sve.cmple.wide.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmplo_wide_s(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmplo_wide_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmplo p0.s, p0/z, z0.s, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 4 x i1> @llvm.aarch64.sve.cmplo.wide.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmpls_wide_s(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmpls_wide_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpls p0.s, p0/z, z0.s, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 4 x i1> @llvm.aarch64.sve.cmpls.wide.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmplt_wide_s(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmplt_wide_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmplt p0.s, p0/z, z0.s, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 4 x i1> @llvm.aarch64.sve.cmplt.wide.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmpne_d(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmpne_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpne p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 2 x i1> @llvm.aarch64.sve.cmpne.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @cmpne_wide_s(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: cmpne_wide_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cmpne p0.s, p0/z, z0.s, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 4 x i1> @llvm.aarch64.sve.cmpne.wide.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 2 x i64> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @fcmeq_d(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b) {
; CHECK-LABEL: fcmeq_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    fcmeq p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 2 x i1> @llvm.aarch64.sve.fcmpeq.nxv2f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @fcmgt_d(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b) {
; CHECK-LABEL: fcmgt_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    fcmgt p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 2 x i1> @llvm.aarch64.sve.fcmpgt.nxv2f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @fcmne_d(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b) {
; CHECK-LABEL: fcmne_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    fcmne p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 2 x i1> @llvm.aarch64.sve.fcmpne.nxv2f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @fcmuo_d(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b) {
; CHECK-LABEL: fcmuo_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    fcmuo p0.d, p0/z, z0.d, z1.d
; CHECK-NEXT:    ret
  %1 = call <vscale x 2 x i1> @llvm.aarch64.sve.fcmpuo.nxv2f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @match_i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: match_i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    match p0.h, p0/z, z0.h, z1.h
; CHECK-NEXT:    ret
  %1 = call <vscale x 8 x i1> @llvm.aarch64.sve.match.nxv8i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %1)
  ret <vscale x 16 x i1> %out
}

define <vscale x 16 x i1> @nmatch_i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: nmatch_i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    nmatch p0.h, p0/z, z0.h, z1.h
; CHECK-NEXT:    ret
  %1 = call <vscale x 8 x i1> @llvm.aarch64.sve.nmatch.nxv8i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  %out = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %1)
  ret <vscale x 16 x i1> %out
}

declare <vscale x 2 x i1> @llvm.aarch64.sve.facgt.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.facge.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.fcmpeq.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.fcmpge.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.fcmpgt.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.fcmpne.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.fcmpuo.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.whilege.nxv2i1.i32(i32, i32)
declare <vscale x 2 x i1> @llvm.aarch64.sve.whilegt.nxv2i1.i32(i32, i32)
declare <vscale x 2 x i1> @llvm.aarch64.sve.whilehi.nxv2i1.i32(i32, i32)
declare <vscale x 2 x i1> @llvm.aarch64.sve.whilehs.nxv2i1.i32(i32, i32)
declare <vscale x 2 x i1> @llvm.aarch64.sve.whilele.nxv2i1.i32(i32, i32)
declare <vscale x 2 x i1> @llvm.aarch64.sve.whilelo.nxv2i1.i32(i32, i32)
declare <vscale x 2 x i1> @llvm.aarch64.sve.whilels.nxv2i1.i32(i32, i32)
declare <vscale x 2 x i1> @llvm.aarch64.sve.whilelt.nxv2i1.i32(i32, i32)
declare <vscale x 2 x i1> @llvm.aarch64.sve.cmpeq.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.cmpeq.wide.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 2 x i64>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.cmpge.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.cmpge.wide.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 2 x i64>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.cmpgt.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.cmpgt.wide.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 2 x i64>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.cmphi.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.cmphi.wide.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 2 x i64>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.cmphs.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.cmphs.wide.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 2 x i64>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.cmple.wide.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 2 x i64>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.cmplo.wide.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 2 x i64>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.cmpls.wide.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 2 x i64>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.cmplt.wide.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 2 x i64>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.cmpne.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.cmpne.wide.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 2 x i64>)
declare <vscale x 8 x i1> @llvm.aarch64.sve.match.nxv8i16(<vscale x 8 x i1>, <vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 8 x i1> @llvm.aarch64.sve.nmatch.nxv8i16(<vscale x 8 x i1>, <vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1>)