; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S -passes=instcombine < %s | FileCheck %s

; InstCombine should push the svbool conversions past the predicate binops:
; binop.nxv16i1(to.svbool(a), b, c) followed by from.svbool.nxvKi1 becomes
; binop.nxvKi1(a, from.svbool(b), from.svbool(c)), removing the round-trip
; through the full <vscale x 16 x i1> svbool type.

target triple = "aarch64-unknown-linux-gnu"

define <vscale x 4 x i1> @try_combine_svbool_binop_and_0(<vscale x 4 x i1> %a, <vscale x 16 x i1> %b, <vscale x 16 x i1> %c) {
; CHECK-LABEL: @try_combine_svbool_binop_and_0(
; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> [[C:%.*]])
; CHECK-NEXT:    [[TMP3:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.and.z.nxv4i1(<vscale x 4 x i1> [[A:%.*]], <vscale x 4 x i1> [[TMP1]], <vscale x 4 x i1> [[TMP2]])
; CHECK-NEXT:    ret <vscale x 4 x i1> [[TMP3]]
;
  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %a)
  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.and.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %c)
  %t3 = tail call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %t2)
  ret <vscale x 4 x i1> %t3
}

define <vscale x 8 x i1> @try_combine_svbool_binop_and_1(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
; CHECK-LABEL: @try_combine_svbool_binop_and_1(
; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.and.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
;
  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.and.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
  ret <vscale x 8 x i1> %t3
}

define <vscale x 4 x i1> @try_combine_svbool_binop_and_2(<vscale x 4 x i1> %a, <vscale x 16 x i1> %b) {
; CHECK-LABEL: @try_combine_svbool_binop_and_2(
; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.and.z.nxv4i1(<vscale x 4 x i1> [[A:%.*]], <vscale x 4 x i1> [[TMP1]], <vscale x 4 x i1> [[TMP1]])
; CHECK-NEXT:    ret <vscale x 4 x i1> [[TMP2]]
;
  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1> %a)
  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.and.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
  %t3 = tail call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %t2)
  ret <vscale x 4 x i1> %t3
}

define <vscale x 2 x i1> @try_combine_svbool_binop_and_3(<vscale x 2 x i1> %a, <vscale x 16 x i1> %b) {
; CHECK-LABEL: @try_combine_svbool_binop_and_3(
; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.and.z.nxv2i1(<vscale x 2 x i1> [[A:%.*]], <vscale x 2 x i1> [[TMP1]], <vscale x 2 x i1> [[TMP1]])
; CHECK-NEXT:    ret <vscale x 2 x i1> [[TMP2]]
;
  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %a)
  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.and.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
  %t3 = tail call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %t2)
  ret <vscale x 2 x i1> %t3
}

define <vscale x 8 x i1> @try_combine_svbool_binop_bic(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
; CHECK-LABEL: @try_combine_svbool_binop_bic(
; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.bic.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
;
  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.bic.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
  ret <vscale x 8 x i1> %t3
}

define <vscale x 8 x i1> @try_combine_svbool_binop_eor(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
; CHECK-LABEL: @try_combine_svbool_binop_eor(
; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.eor.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
;
  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.eor.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
  ret <vscale x 8 x i1> %t3
}

define <vscale x 8 x i1> @try_combine_svbool_binop_nand(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
; CHECK-LABEL: @try_combine_svbool_binop_nand(
; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.nand.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
;
  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.nand.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
  ret <vscale x 8 x i1> %t3
}

define <vscale x 8 x i1> @try_combine_svbool_binop_nor(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
; CHECK-LABEL: @try_combine_svbool_binop_nor(
; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.nor.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
;
  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.nor.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
  ret <vscale x 8 x i1> %t3
}

define <vscale x 8 x i1> @try_combine_svbool_binop_orn(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
; CHECK-LABEL: @try_combine_svbool_binop_orn(
; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.orn.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
;
  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.orn.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
  ret <vscale x 8 x i1> %t3
}

define <vscale x 8 x i1> @try_combine_svbool_binop_orr(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
; CHECK-LABEL: @try_combine_svbool_binop_orr(
; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.orr.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
;
  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.orr.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
  ret <vscale x 8 x i1> %t3
}

declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1>)

declare <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1>)

declare <vscale x 16 x i1> @llvm.aarch64.sve.and.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.bic.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.eor.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.nand.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.nor.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.orn.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.orr.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)

attributes #0 = { "target-features"="+sve" }