; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s | FileCheck %s

target triple = "aarch64-unknown-linux-gnu"

;
; ABS (sve_int_un_pred_arit_0)
;

; Check movprfx is inserted when no passthru/predicate is present
define <vscale x 16 x i8> @abs_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) #0 {
; CHECK-LABEL: abs_i8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.b, p0/m, z1.b
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 16 x i8> @llvm.abs.nxv16i8(<vscale x 16 x i8> %b, i1 0)
  ret <vscale x 16 x i8> %ret
}

; Check movprfx is not inserted when dstReg == srcReg
define <vscale x 16 x i8> @abs_i8_dupreg(<vscale x 16 x i8> %a) #0 {
; CHECK-LABEL: abs_i8_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    abs z0.b, p0/m, z0.b
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 16 x i8> @llvm.abs.nxv16i8(<vscale x 16 x i8> %a, i1 0)
  ret <vscale x 16 x i8> %ret
}

; Check movprfx is inserted when passthru is undef
define <vscale x 16 x i8> @abs_i8_undef(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) #0 {
; CHECK-LABEL: abs_i8_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.b, p0/m, z1.b
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 16 x i1> @llvm.aarch64.sve.ptrue.nxv16i1(i32 31)
  %ret = tail call <vscale x 16 x i8> @llvm.aarch64.sve.abs.nxv16i8(<vscale x 16 x i8> undef, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %ret
}

; Check movprfx is inserted when predicate is all active, making the passthru dead
define <vscale x 16 x i8> @abs_i8_active(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) #0 {
; CHECK-LABEL: abs_i8_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.b, p0/m, z1.b
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 16 x i1> @llvm.aarch64.sve.ptrue.nxv16i1(i32 31)
  %ret = tail call <vscale x 16 x i8> @llvm.aarch64.sve.abs.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %ret
}

; Check movprfx is not inserted when predicate is not all active, making the passthru used
define <vscale x 16 x i8> @abs_i8_not_active(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) #0 {
; CHECK-LABEL: abs_i8_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    abs z0.b, p0/m, z1.b
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %ret = tail call <vscale x 16 x i8> @llvm.aarch64.sve.abs.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg.to, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %ret
}

define <vscale x 8 x i16> @abs_i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) #0 {
; CHECK-LABEL: abs_i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 8 x i16> @llvm.abs.nxv8i16(<vscale x 8 x i16> %b, i1 0)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @abs_i16_dupreg(<vscale x 8 x i16> %a) #0 {
; CHECK-LABEL: abs_i16_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    abs z0.h, p0/m, z0.h
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 8 x i16> @llvm.abs.nxv8i16(<vscale x 8 x i16> %a, i1 0)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @abs_i16_undef(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) #0 {
; CHECK-LABEL: abs_i16_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x i16> @llvm.aarch64.sve.abs.nxv8i16(<vscale x 8 x i16> undef, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @abs_i16_active(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) #0 {
; CHECK-LABEL: abs_i16_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x i16> @llvm.aarch64.sve.abs.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @abs_i16_not_active(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) #0 {
; CHECK-LABEL: abs_i16_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    abs z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 8 x i16> @llvm.aarch64.sve.abs.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg.from, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 4 x i32> @abs_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: abs_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 4 x i32> @llvm.abs.nxv4i32(<vscale x 4 x i32> %b, i1 0)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @abs_i32_dupreg(<vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: abs_i32_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    abs z0.s, p0/m, z0.s
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 4 x i32> @llvm.abs.nxv4i32(<vscale x 4 x i32> %a, i1 0)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @abs_i32_undef(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: abs_i32_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.abs.nxv4i32(<vscale x 4 x i32> undef, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @abs_i32_active(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: abs_i32_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.abs.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @abs_i32_not_active(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: abs_i32_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    abs z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.abs.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg.from, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 2 x i64> @abs_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: abs_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x i64> @llvm.abs.nxv2i64(<vscale x 2 x i64> %b, i1 0)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @abs_i64_dupreg(<vscale x 2 x i64> %a) #0 {
; CHECK-LABEL: abs_i64_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    abs z0.d, p0/m, z0.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x i64> @llvm.abs.nxv2i64(<vscale x 2 x i64> %a, i1 0)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @abs_i64_undef(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: abs_i64_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.abs.nxv2i64(<vscale x 2 x i64> undef, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @abs_i64_active(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: abs_i64_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    abs z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.abs.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @abs_i64_not_active(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i1> %pg) #0 {
; CHECK-LABEL: abs_i64_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    abs z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.abs.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

;
; CLS (sve_int_un_pred_arit_1)
;

define <vscale x 16 x i8> @cls_i8_dupreg(<vscale x 16 x i8> %a) #0 {
; CHECK-LABEL: cls_i8_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    cls z0.b, p0/m, z0.b
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 16 x i1> @llvm.aarch64.sve.ptrue.nxv16i1(i32 31)
  %ret = tail call <vscale x 16 x i8> @llvm.aarch64.sve.cls.nxv16i8(<vscale x 16 x i8> undef, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %a)
  ret <vscale x 16 x i8> %ret
}

define <vscale x 16 x i8> @cls_i8_undef(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) #0 {
; CHECK-LABEL: cls_i8_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    cls z0.b, p0/m, z1.b
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 16 x i1> @llvm.aarch64.sve.ptrue.nxv16i1(i32 31)
  %ret = tail call <vscale x 16 x i8> @llvm.aarch64.sve.cls.nxv16i8(<vscale x 16 x i8> undef, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %ret
}

define <vscale x 16 x i8> @cls_i8_active(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) #0 {
; CHECK-LABEL: cls_i8_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.b
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    cls z0.b, p0/m, z1.b
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 16 x i1> @llvm.aarch64.sve.ptrue.nxv16i1(i32 31)
  %ret = tail call <vscale x 16 x i8> @llvm.aarch64.sve.cls.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %ret
}

define <vscale x 16 x i8> @cls_i8_not_active(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) #0 {
; CHECK-LABEL: cls_i8_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    cls z0.b, p0/m, z1.b
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %ret = tail call <vscale x 16 x i8> @llvm.aarch64.sve.cls.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg.to, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %ret
}

define <vscale x 8 x i16> @cls_i16_dupreg(<vscale x 8 x i16> %a) #0 {
; CHECK-LABEL: cls_i16_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    cls z0.h, p0/m, z0.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x i16> @llvm.aarch64.sve.cls.nxv8i16(<vscale x 8 x i16> undef, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %a)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @cls_i16_undef(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) #0 {
; CHECK-LABEL: cls_i16_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    cls z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x i16> @llvm.aarch64.sve.cls.nxv8i16(<vscale x 8 x i16> undef, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @cls_i16_active(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) #0 {
; CHECK-LABEL: cls_i16_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    cls z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x i16> @llvm.aarch64.sve.cls.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @cls_i16_not_active(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) #0 {
; CHECK-LABEL: cls_i16_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    cls z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 8 x i16> @llvm.aarch64.sve.cls.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg.from, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 4 x i32> @cls_i32_dupreg(<vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: cls_i32_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    cls z0.s, p0/m, z0.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.cls.nxv4i32(<vscale x 4 x i32> undef, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %a)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @cls_i32_undef(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: cls_i32_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    cls z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.cls.nxv4i32(<vscale x 4 x i32> undef, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @cls_i32_active(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: cls_i32_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    cls z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.cls.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @cls_i32_not_active(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: cls_i32_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    cls z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.cls.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg.from, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 2 x i64> @cls_i64_dupreg(<vscale x 2 x i64> %a) #0 {
; CHECK-LABEL: cls_i64_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    cls z0.d, p0/m, z0.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.cls.nxv2i64(<vscale x 2 x i64> undef, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %a)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @cls_i64_undef(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: cls_i64_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    cls z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.cls.nxv2i64(<vscale x 2 x i64> undef, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @cls_i64_active(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: cls_i64_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    cls z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.cls.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @cls_i64_not_active(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i1> %pg) #0 {
; CHECK-LABEL: cls_i64_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cls z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.cls.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

;
; FABS (sve_int_un_pred_arit_1_fp)
;

define <vscale x 8 x half> @fabs_f16(<vscale x 8 x half> %a, <vscale x 8 x half> %b) #0 {
; CHECK-LABEL: fabs_f16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fabs z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 8 x half> @llvm.fabs.nxv8f16(<vscale x 8 x half> %b)
  ret <vscale x 8 x half> %ret
}

define <vscale x 8 x half> @fabs_f16_dupreg(<vscale x 8 x half> %a) #0 {
; CHECK-LABEL: fabs_f16_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    fabs z0.h, p0/m, z0.h
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 8 x half> @llvm.fabs.nxv8f16(<vscale x 8 x half> %a)
  ret <vscale x 8 x half> %ret
}

define <vscale x 8 x half> @fabs_f16_undef(<vscale x 8 x half> %a, <vscale x 8 x half> %b) #0 {
; CHECK-LABEL: fabs_f16_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fabs z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x half> @llvm.aarch64.sve.fabs.nxv8f16(<vscale x 8 x half> undef, <vscale x 8 x i1> %pg, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %ret
}

define <vscale x 8 x half> @fabs_f16_active(<vscale x 8 x half> %a, <vscale x 8 x half> %b) #0 {
; CHECK-LABEL: fabs_f16_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fabs z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x half> @llvm.aarch64.sve.fabs.nxv8f16(<vscale x 8 x half> %a, <vscale x 8 x i1> %pg, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %ret
}

define <vscale x 8 x half> @fabs_f16_not_active(<vscale x 8 x half> %a, <vscale x 8 x half> %b) #0 {
; CHECK-LABEL: fabs_f16_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    fabs z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 8 x half> @llvm.aarch64.sve.fabs.nxv8f16(<vscale x 8 x half> %a, <vscale x 8 x i1> %pg.from, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %ret
}

define <vscale x 4 x float> @fabs_f32(<vscale x 4 x float> %a, <vscale x 4 x float> %b) #0 {
; CHECK-LABEL: fabs_f32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fabs z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 4 x float> @llvm.fabs.nxv4f32(<vscale x 4 x float> %b)
  ret <vscale x 4 x float> %ret
}

define <vscale x 4 x float> @fabs_f32_dupreg(<vscale x 4 x float> %a) #0 {
; CHECK-LABEL: fabs_f32_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    fabs z0.s, p0/m, z0.s
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 4 x float> @llvm.fabs.nxv4f32(<vscale x 4 x float> %a)
  ret <vscale x 4 x float> %ret
}

define <vscale x 4 x float> @fabs_f32_undef(<vscale x 4 x float> %a, <vscale x 4 x float> %b) #0 {
; CHECK-LABEL: fabs_f32_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fabs z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x float> @llvm.aarch64.sve.fabs.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x i1> %pg, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %ret
}

define <vscale x 4 x float> @fabs_f32_active(<vscale x 4 x float> %a, <vscale x 4 x float> %b) #0 {
; CHECK-LABEL: fabs_f32_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fabs z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x float> @llvm.aarch64.sve.fabs.nxv4f32(<vscale x 4 x float> %a, <vscale x 4 x i1> %pg, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %ret
}

define <vscale x 4 x float> @fabs_f32_not_active(<vscale x 4 x float> %a, <vscale x 4 x float> %b) #0 {
; CHECK-LABEL: fabs_f32_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    fabs z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 4 x float> @llvm.aarch64.sve.fabs.nxv4f32(<vscale x 4 x float> %a, <vscale x 4 x i1> %pg.from, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %ret
}

define <vscale x 2 x double> @fabs_f64(<vscale x 2 x double> %a, <vscale x 2 x double> %b) #0 {
; CHECK-LABEL: fabs_f64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fabs z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x double> @llvm.fabs.nxv2f64(<vscale x 2 x double> %b)
  ret <vscale x 2 x double> %ret
}

define <vscale x 2 x double> @fabs_f64_dupreg(<vscale x 2 x double> %a) #0 {
; CHECK-LABEL: fabs_f64_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    fabs z0.d, p0/m, z0.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x double> @llvm.fabs.nxv2f64(<vscale x 2 x double> %a)
  ret <vscale x 2 x double> %ret
}

define <vscale x 2 x double> @fabs_f64_undef(<vscale x 2 x double> %a, <vscale x 2 x double> %b) #0 {
; CHECK-LABEL: fabs_f64_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fabs z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x double> @llvm.aarch64.sve.fabs.nxv2f64(<vscale x 2 x double> undef, <vscale x 2 x i1> %pg, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %ret
}

define <vscale x 2 x double> @fabs_f64_active(<vscale x 2 x double> %a, <vscale x 2 x double> %b) #0 {
; CHECK-LABEL: fabs_f64_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fabs z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x double> @llvm.aarch64.sve.fabs.nxv2f64(<vscale x 2 x double> %a, <vscale x 2 x i1> %pg, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %ret
}

define <vscale x 2 x double> @fabs_f64_not_active(<vscale x 2 x double> %a, <vscale x 2 x double> %b, <vscale x 2 x i1> %pg) #0 {
; CHECK-LABEL: fabs_f64_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    fabs z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x double> @llvm.aarch64.sve.fabs.nxv2f64(<vscale x 2 x double> %a, <vscale x 2 x i1> %pg, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %ret
}

;
; FSQRT (sve_fp_2op_p_zd_HSD)
;

define <vscale x 8 x half> @fsqrt_f16(<vscale x 8 x half> %a, <vscale x 8 x half> %b) #0 {
; CHECK-LABEL: fsqrt_f16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fsqrt z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 8 x half> @llvm.sqrt.nxv8f16(<vscale x 8 x half> %b)
  ret <vscale x 8 x half> %ret
}

define <vscale x 8 x half> @fsqrt_f16_dupreg(<vscale x 8 x half> %a) #0 {
; CHECK-LABEL: fsqrt_f16_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    fsqrt z0.h, p0/m, z0.h
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 8 x half> @llvm.sqrt.nxv8f16(<vscale x 8 x half> %a)
  ret <vscale x 8 x half> %ret
}

define <vscale x 8 x half> @fsqrt_f16_undef(<vscale x 8 x half> %a, <vscale x 8 x half> %b) #0 {
; CHECK-LABEL: fsqrt_f16_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fsqrt z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x half> @llvm.aarch64.sve.fsqrt.nxv8f16(<vscale x 8 x half> undef, <vscale x 8 x i1> %pg, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %ret
}

define <vscale x 8 x half> @fsqrt_f16_active(<vscale x 8 x half> %a, <vscale x 8 x half> %b) #0 {
; CHECK-LABEL: fsqrt_f16_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fsqrt z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x half> @llvm.aarch64.sve.fsqrt.nxv8f16(<vscale x 8 x half> %a, <vscale x 8 x i1> %pg, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %ret
}

define <vscale x 8 x half> @fsqrt_f16_not_active(<vscale x 8 x half> %a, <vscale x 8 x half> %b) #0 {
; CHECK-LABEL: fsqrt_f16_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    fsqrt z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 8 x half> @llvm.aarch64.sve.fsqrt.nxv8f16(<vscale x 8 x half> %a, <vscale x 8 x i1> %pg.from, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %ret
}

define <vscale x 4 x float> @fsqrt_f32(<vscale x 4 x float> %a, <vscale x 4 x float> %b) #0 {
; CHECK-LABEL: fsqrt_f32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fsqrt z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 4 x float> @llvm.sqrt.nxv4f32(<vscale x 4 x float> %b)
  ret <vscale x 4 x float> %ret
}

define <vscale x 4 x float> @fsqrt_f32_dupreg(<vscale x 4 x float> %a) #0 {
; CHECK-LABEL: fsqrt_f32_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    fsqrt z0.s, p0/m, z0.s
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 4 x float> @llvm.sqrt.nxv4f32(<vscale x 4 x float> %a)
  ret <vscale x 4 x float> %ret
}

define <vscale x 4 x float> @fsqrt_f32_undef(<vscale x 4 x float> %a, <vscale x 4 x float> %b) #0 {
; CHECK-LABEL: fsqrt_f32_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fsqrt z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x float> @llvm.aarch64.sve.fsqrt.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x i1> %pg, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %ret
}

define <vscale x 4 x float> @fsqrt_f32_active(<vscale x 4 x float> %a, <vscale x 4 x float> %b) #0 {
; CHECK-LABEL: fsqrt_f32_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fsqrt z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x float> @llvm.aarch64.sve.fsqrt.nxv4f32(<vscale x 4 x float> %a, <vscale x 4 x i1> %pg, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %ret
}

define <vscale x 4 x float> @fsqrt_f32_not_active(<vscale x 4 x float> %a, <vscale x 4 x float> %b) #0 {
; CHECK-LABEL: fsqrt_f32_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    fsqrt z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 4 x float> @llvm.aarch64.sve.fsqrt.nxv4f32(<vscale x 4 x float> %a, <vscale x 4 x i1> %pg.from, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %ret
}

define <vscale x 2 x double> @fsqrt_f64(<vscale x 2 x double> %a, <vscale x 2 x double> %b) #0 {
; CHECK-LABEL: fsqrt_f64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fsqrt z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x double> @llvm.sqrt.nxv2f64(<vscale x 2 x double> %b)
  ret <vscale x 2 x double> %ret
}

define <vscale x 2 x double> @fsqrt_f64_dupreg(<vscale x 2 x double> %a) #0 {
; CHECK-LABEL: fsqrt_f64_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    fsqrt z0.d, p0/m, z0.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x double> @llvm.sqrt.nxv2f64(<vscale x 2 x double> %a)
  ret <vscale x 2 x double> %ret
}

define <vscale x 2 x double> @fsqrt_f64_undef(<vscale x 2 x double> %a, <vscale x 2 x double> %b) #0 {
; CHECK-LABEL: fsqrt_f64_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fsqrt z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x double> @llvm.aarch64.sve.fsqrt.nxv2f64(<vscale x 2 x double> undef, <vscale x 2 x i1> %pg, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %ret
}

define <vscale x 2 x double> @fsqrt_f64_active(<vscale x 2 x double> %a, <vscale x 2 x double> %b) #0 {
; CHECK-LABEL: fsqrt_f64_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    fsqrt z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x double> @llvm.aarch64.sve.fsqrt.nxv2f64(<vscale x 2 x double> %a, <vscale x 2 x i1> %pg, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %ret
}

define <vscale x 2 x double> @fsqrt_f64_not_active(<vscale x 2 x double> %a, <vscale x 2 x double> %b, <vscale x 2 x i1> %pg) #0 {
; CHECK-LABEL: fsqrt_f64_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    fsqrt z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x double> @llvm.aarch64.sve.fsqrt.nxv2f64(<vscale x 2 x double> %a, <vscale x 2 x i1> %pg, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %ret
}

;
; SXTB (sve_int_un_pred_arit_0_h)
;

define <vscale x 8 x i16> @sxtb_i16(<vscale x 8 x i8> %a, <vscale x 8 x i8> %b) #0 {
; CHECK-LABEL: sxtb_i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtb z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %ret = sext <vscale x 8 x i8> %b to <vscale x 8 x i16>
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @sxtb_i16_dupreg(<vscale x 8 x i8> %a) #0 {
; CHECK-LABEL: sxtb_i16_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    sxtb z0.h, p0/m, z0.h
; CHECK-NEXT:    ret
  %ret = sext <vscale x 8 x i8> %a to <vscale x 8 x i16>
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @sxtb_i16_undef(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) #0 {
; CHECK-LABEL: sxtb_i16_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtb z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x i16> @llvm.aarch64.sve.sxtb.nxv8i16(<vscale x 8 x i16> undef, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @sxtb_i16_active(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) #0 {
; CHECK-LABEL: sxtb_i16_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.h
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtb z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32 31)
  %ret = tail call <vscale x 8 x i16> @llvm.aarch64.sve.sxtb.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 8 x i16> @sxtb_i16_not_active(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) #0 {
; CHECK-LABEL: sxtb_i16_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    sxtb z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 8 x i16> @llvm.aarch64.sve.sxtb.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg.from, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %ret
}

define <vscale x 4 x i32> @sxtb_i32(<vscale x 4 x i8> %a, <vscale x 4 x i8> %b) #0 {
; CHECK-LABEL: sxtb_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtb z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %ret = sext <vscale x 4 x i8> %b to <vscale x 4 x i32>
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @sxtb_i32_dupreg(<vscale x 4 x i8> %a) #0 {
; CHECK-LABEL: sxtb_i32_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    sxtb z0.s, p0/m, z0.s
; CHECK-NEXT:    ret
  %ret = sext <vscale x 4 x i8> %a to <vscale x 4 x i32>
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @sxtb_i32_undef(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: sxtb_i32_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtb z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.sxtb.nxv4i32(<vscale x 4 x i32> undef, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @sxtb_i32_active(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: sxtb_i32_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtb z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.sxtb.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @sxtb_i32_not_active(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: sxtb_i32_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    sxtb z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.sxtb.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg.from, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 2 x i64> @sxtb_i64(<vscale x 2 x i8> %a, <vscale x 2 x i8> %b) #0 {
; CHECK-LABEL: sxtb_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtb z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = sext <vscale x 2 x i8> %b to <vscale x 2 x i64>
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxtb_i64_dupreg(<vscale x 2 x i8> %a) #0 {
; CHECK-LABEL: sxtb_i64_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    sxtb z0.d, p0/m, z0.d
; CHECK-NEXT:    ret
  %ret = sext <vscale x 2 x i8> %a to <vscale x 2 x i64>
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxtb_i64_undef(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: sxtb_i64_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtb z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.sxtb.nxv2i64(<vscale x 2 x i64> undef, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxtb_i64_active(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: sxtb_i64_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtb z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.sxtb.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxtb_i64_not_active(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i1> %pg) #0 {
; CHECK-LABEL: sxtb_i64_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sxtb z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.sxtb.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

;
; SXTH (sve_int_un_pred_arit_0_w)
;

define <vscale x 4 x i32> @sxth_i32(<vscale x 4 x i16> %a, <vscale x 4 x i16> %b) #0 {
; CHECK-LABEL: sxth_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxth z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %ret = sext <vscale x 4 x i16> %b to <vscale x 4 x i32>
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @sxth_i32_dupreg(<vscale x 4 x i16> %a) #0 {
; CHECK-LABEL: sxth_i32_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    sxth z0.s, p0/m, z0.s
; CHECK-NEXT:    ret
  %ret = sext <vscale x 4 x i16> %a to <vscale x 4 x i32>
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @sxth_i32_undef(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: sxth_i32_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxth z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.sxth.nxv4i32(<vscale x 4 x i32> undef, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @sxth_i32_active(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: sxth_i32_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.s
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxth z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32 31)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.sxth.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 4 x i32> @sxth_i32_not_active(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) #0 {
; CHECK-LABEL: sxth_i32_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    sxth z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %pg.to = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1> %pg)
  %pg.from = tail call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg.to)
  %ret = tail call <vscale x 4 x i32> @llvm.aarch64.sve.sxth.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg.from, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %ret
}

define <vscale x 2 x i64> @sxth_i64(<vscale x 2 x i16> %a, <vscale x 2 x i16> %b) #0 {
; CHECK-LABEL: sxth_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxth z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = sext <vscale x 2 x i16> %b to <vscale x 2 x i64>
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxth_i64_dupreg(<vscale x 2 x i16> %a) #0 {
; CHECK-LABEL: sxth_i64_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    sxth z0.d, p0/m, z0.d
; CHECK-NEXT:    ret
  %ret = sext <vscale x 2 x i16> %a to <vscale x 2 x i64>
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxth_i64_undef(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: sxth_i64_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxth z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.sxth.nxv2i64(<vscale x 2 x i64> undef, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxth_i64_active(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: sxth_i64_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxth z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.sxth.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxth_i64_not_active(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i1> %pg) #0 {
; CHECK-LABEL: sxth_i64_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sxth z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.sxth.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

;
; SXTW (sve_int_un_pred_arit_0_d)
;

define <vscale x 2 x i64> @sxtw_i64(<vscale x 2 x i32> %a, <vscale x 2 x i32> %b) #0 {
; CHECK-LABEL: sxtw_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtw z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = sext <vscale x 2 x i32> %b to <vscale x 2 x i64>
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxtw_i64_dupreg(<vscale x 2 x i32> %a) #0 {
; CHECK-LABEL: sxtw_i64_dupreg:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    sxtw z0.d, p0/m, z0.d
; CHECK-NEXT:    ret
  %ret = sext <vscale x 2 x i32> %a to <vscale x 2 x i64>
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxtw_i64_undef(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: sxtw_i64_undef:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtw z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.sxtw.nxv2i64(<vscale x 2 x i64> undef, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxtw_i64_active(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: sxtw_i64_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ptrue p0.d
; CHECK-NEXT:    movprfx z0, z1
; CHECK-NEXT:    sxtw z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %pg = tail call <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32 31)
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.sxtw.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

define <vscale x 2 x i64> @sxtw_i64_not_active(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i1> %pg) #0 {
; CHECK-LABEL: sxtw_i64_not_active:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sxtw z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %ret = tail call <vscale x 2 x i64> @llvm.aarch64.sve.sxtw.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %ret
}

declare <vscale x 16 x i1> @llvm.aarch64.sve.ptrue.nxv16i1(i32)
declare <vscale x 8 x i1> @llvm.aarch64.sve.ptrue.nxv8i1(i32)
declare <vscale x 4 x i1> @llvm.aarch64.sve.ptrue.nxv4i1(i32)
declare <vscale x 2 x i1> @llvm.aarch64.sve.ptrue.nxv2i1(i32)

declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1>)
declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1>)

declare <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1>)
declare <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1>)
declare <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1>)

declare <vscale x 16 x i8> @llvm.aarch64.sve.abs.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i1>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.abs.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i1>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.abs.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.abs.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)

declare <vscale x 16 x i8> @llvm.abs.nxv16i8(<vscale x 16 x i8>, i1)
declare <vscale x 8 x i16> @llvm.abs.nxv8i16(<vscale x 8 x i16>, i1)
declare <vscale x 4 x i32> @llvm.abs.nxv4i32(<vscale x 4 x i32>, i1)
declare <vscale x 2 x i64> @llvm.abs.nxv2i64(<vscale x 2 x i64>, i1)

declare <vscale x 16 x i8> @llvm.aarch64.sve.cls.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i1>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.cls.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i1>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.cls.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.cls.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)

declare <vscale x 8 x half> @llvm.aarch64.sve.fabs.nxv8f16(<vscale x 8 x half>, <vscale x 8 x i1>, <vscale x 8 x half>)
declare <vscale x 4 x float> @llvm.aarch64.sve.fabs.nxv4f32(<vscale x 4 x float>, <vscale x 4 x i1>, <vscale x 4 x float>)
declare <vscale x 2 x double> @llvm.aarch64.sve.fabs.nxv2f64(<vscale x 2 x double>, <vscale x 2 x i1>, <vscale x 2 x double>)

declare <vscale x 8 x half> @llvm.fabs.nxv8f16(<vscale x 8 x half>)
declare <vscale x 4 x float> @llvm.fabs.nxv4f32(<vscale x 4 x float>)
declare <vscale x 2 x double> @llvm.fabs.nxv2f64(<vscale x 2 x double>)

declare <vscale x 8 x half> @llvm.aarch64.sve.fsqrt.nxv8f16(<vscale x 8 x half>, <vscale x 8 x i1>, <vscale x 8 x half>)
declare <vscale x 4 x float> @llvm.aarch64.sve.fsqrt.nxv4f32(<vscale x 4 x float>, <vscale x 4 x i1>, <vscale x 4 x float>)
declare <vscale x 2 x double> @llvm.aarch64.sve.fsqrt.nxv2f64(<vscale x 2 x double>, <vscale x 2 x i1>, <vscale x 2 x double>)

declare <vscale x 8 x half> @llvm.sqrt.nxv8f16(<vscale x 8 x half>)
declare <vscale x 4 x float> @llvm.sqrt.nxv4f32(<vscale x 4 x float>)
declare <vscale x 2 x double> @llvm.sqrt.nxv2f64(<vscale x 2 x double>)

declare <vscale x 8 x i16> @llvm.aarch64.sve.sxtb.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i1>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.sxtb.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sxtb.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)

declare <vscale x 4 x i32> @llvm.aarch64.sve.sxth.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sxth.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)

declare <vscale x 2 x i64> @llvm.aarch64.sve.sxtw.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)

attributes #0 = { nounwind "target-features"="+sve" }