; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=aarch64--linux-gnu -mattr=sve < %s | FileCheck %s
; RUN: llc -mtriple=aarch64--linux-gnu -mattr=sme < %s | FileCheck %s

; ld2b
define { <vscale x 16 x i8>, <vscale x 16 x i8> } @ld2.nxv32i8(<vscale x 16 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld2.nxv32i8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld2b { z0.b, z1.b }, p0/z, [x0, x1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i8, ptr %addr, i64 %a
  %res = call { <vscale x 16 x i8>, <vscale x 16 x i8> } @llvm.aarch64.sve.ld2.sret.nxv16i8(<vscale x 16 x i1> %Pg, ptr %addr2)
  ret { <vscale x 16 x i8>, <vscale x 16 x i8> } %res
}

; ld2h
define { <vscale x 8 x i16>, <vscale x 8 x i16> } @ld2.nxv16i16(<vscale x 8 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld2.nxv16i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld2h { z0.h, z1.h }, p0/z, [x0, x1, lsl #1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i16, ptr %addr, i64 %a
  %res = call { <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld2.sret.nxv8i16(<vscale x 8 x i1> %Pg, ptr %addr2)
  ret { <vscale x 8 x i16>, <vscale x 8 x i16> } %res
}

define { <vscale x 8 x half>, <vscale x 8 x half> } @ld2.nxv16f16(<vscale x 8 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld2.nxv16f16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld2h { z0.h, z1.h }, p0/z, [x0, x1, lsl #1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr half, ptr %addr, i64 %a
  %res = call { <vscale x 8 x half>, <vscale x 8 x half> } @llvm.aarch64.sve.ld2.sret.nxv8f16(<vscale x 8 x i1> %Pg, ptr %addr2)
  ret { <vscale x 8 x half>, <vscale x 8 x half> } %res
}

define { <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } @ld2.nxv16bf16(<vscale x 8 x i1> %Pg, ptr %addr, i64 %a) #0 {
; CHECK-LABEL: ld2.nxv16bf16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld2h { z0.h, z1.h }, p0/z, [x0, x1, lsl #1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr bfloat, ptr %addr, i64 %a
  %res = call { <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } @llvm.aarch64.sve.ld2.sret.nxv8bf16(<vscale x 8 x i1> %Pg, ptr %addr2)
  ret { <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } %res
}

; ld2w
define { <vscale x 4 x i32>, <vscale x 4 x i32> } @ld2.nxv8i32(<vscale x 4 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld2.nxv8i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld2w { z0.s, z1.s }, p0/z, [x0, x1, lsl #2]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i32, ptr %addr, i64 %a
  %res = call { <vscale x 4 x i32>, <vscale x 4 x i32> } @llvm.aarch64.sve.ld2.sret.nxv4i32(<vscale x 4 x i1> %Pg, ptr %addr2)
  ret { <vscale x 4 x i32>, <vscale x 4 x i32> } %res
}

define { <vscale x 4 x float>, <vscale x 4 x float> } @ld2.nxv8f32(<vscale x 4 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld2.nxv8f32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld2w { z0.s, z1.s }, p0/z, [x0, x1, lsl #2]
; CHECK-NEXT:    ret
  %addr2 = getelementptr float, ptr %addr, i64 %a
  %res = call { <vscale x 4 x float>, <vscale x 4 x float> } @llvm.aarch64.sve.ld2.sret.nxv4f32(<vscale x 4 x i1> %Pg, ptr %addr2)
  ret { <vscale x 4 x float>, <vscale x 4 x float> } %res
}

; ld2d
define { <vscale x 2 x i64>, <vscale x 2 x i64> } @ld2.nxv4i64(<vscale x 2 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld2.nxv4i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld2d { z0.d, z1.d }, p0/z, [x0, x1, lsl #3]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i64, ptr %addr, i64 %a
  %res = call { <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.aarch64.sve.ld2.sret.nxv2i64(<vscale x 2 x i1> %Pg, ptr %addr2)
  ret { <vscale x 2 x i64>, <vscale x 2 x i64> } %res
}

define { <vscale x 2 x double>, <vscale x 2 x double> } @ld2.nxv4f64(<vscale x 2 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld2.nxv4f64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld2d { z0.d, z1.d }, p0/z, [x0, x1, lsl #3]
; CHECK-NEXT:    ret
  %addr2 = getelementptr double, ptr %addr, i64 %a
  %res = call { <vscale x 2 x double>, <vscale x 2 x double> } @llvm.aarch64.sve.ld2.sret.nxv2f64(<vscale x 2 x i1> %Pg, ptr %addr2)
  ret { <vscale x 2 x double>, <vscale x 2 x double> } %res
}

; ld3b
define { <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8> } @ld3.nxv48i8(<vscale x 16 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld3.nxv48i8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld3b { z0.b - z2.b }, p0/z, [x0, x1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i8, ptr %addr, i64 %a
  %res = call { <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8> } @llvm.aarch64.sve.ld3.sret.nxv16i8(<vscale x 16 x i1> %Pg, ptr %addr2)
  ret { <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8> } %res
}

; ld3h
define { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } @ld3.nxv24i16(<vscale x 8 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld3.nxv24i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld3h { z0.h - z2.h }, p0/z, [x0, x1, lsl #1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i16, ptr %addr, i64 %a
  %res = call { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld3.sret.nxv8i16(<vscale x 8 x i1> %Pg, ptr %addr2)
  ret { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } %res
}

define { <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half> } @ld3.nxv24f16(<vscale x 8 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld3.nxv24f16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld3h { z0.h - z2.h }, p0/z, [x0, x1, lsl #1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr half, ptr %addr, i64 %a
  %res = call { <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half> } @llvm.aarch64.sve.ld3.sret.nxv8f16(<vscale x 8 x i1> %Pg, ptr %addr2)
  ret { <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half> } %res
}

define { <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } @ld3.nxv24bf16(<vscale x 8 x i1> %Pg, ptr %addr, i64 %a) #0 {
; CHECK-LABEL: ld3.nxv24bf16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld3h { z0.h - z2.h }, p0/z, [x0, x1, lsl #1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr bfloat, ptr %addr, i64 %a
  %res = call { <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } @llvm.aarch64.sve.ld3.sret.nxv8bf16(<vscale x 8 x i1> %Pg, ptr %addr2)
  ret { <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } %res
}

; ld3w
define { <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32> } @ld3.nxv12i32(<vscale x 4 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld3.nxv12i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld3w { z0.s - z2.s }, p0/z, [x0, x1, lsl #2]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i32, ptr %addr, i64 %a
  %res = call { <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32> } @llvm.aarch64.sve.ld3.sret.nxv4i32(<vscale x 4 x i1> %Pg, ptr %addr2)
  ret { <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32> } %res
}

define { <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float> } @ld3.nxv12f32(<vscale x 4 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld3.nxv12f32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld3w { z0.s - z2.s }, p0/z, [x0, x1, lsl #2]
; CHECK-NEXT:    ret
  %addr2 = getelementptr float, ptr %addr, i64 %a
  %res = call { <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float> } @llvm.aarch64.sve.ld3.sret.nxv4f32(<vscale x 4 x i1> %Pg, ptr %addr2)
  ret { <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float> } %res
}

; ld3d
define { <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64> } @ld3.nxv6i64(<vscale x 2 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld3.nxv6i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld3d { z0.d - z2.d }, p0/z, [x0, x1, lsl #3]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i64, ptr %addr, i64 %a
  %res = call { <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.aarch64.sve.ld3.sret.nxv2i64(<vscale x 2 x i1> %Pg, ptr %addr2)
  ret { <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64> } %res
}

define { <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double> } @ld3.nxv6f64(<vscale x 2 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld3.nxv6f64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld3d { z0.d - z2.d }, p0/z, [x0, x1, lsl #3]
; CHECK-NEXT:    ret
  %addr2 = getelementptr double, ptr %addr, i64 %a
  %res = call { <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double> } @llvm.aarch64.sve.ld3.sret.nxv2f64(<vscale x 2 x i1> %Pg, ptr %addr2)
  ret { <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double> } %res
}

; ld4b
define { <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8> } @ld4.nxv64i8(<vscale x 16 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld4.nxv64i8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld4b { z0.b - z3.b }, p0/z, [x0, x1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i8, ptr %addr, i64 %a
  %res = call { <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8> } @llvm.aarch64.sve.ld4.sret.nxv16i8(<vscale x 16 x i1> %Pg, ptr %addr2)
  ret { <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8> } %res
}

; ld4h
define { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } @ld4.nxv32i16(<vscale x 8 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld4.nxv32i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld4h { z0.h - z3.h }, p0/z, [x0, x1, lsl #1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i16, ptr %addr, i64 %a
  %res = call { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld4.sret.nxv8i16(<vscale x 8 x i1> %Pg, ptr %addr2)
  ret { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } %res
}

define { <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half> } @ld4.nxv32f16(<vscale x 8 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld4.nxv32f16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld4h { z0.h - z3.h }, p0/z, [x0, x1, lsl #1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr half, ptr %addr, i64 %a
  %res = call { <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half> } @llvm.aarch64.sve.ld4.sret.nxv8f16(<vscale x 8 x i1> %Pg, ptr %addr2)
  ret { <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half> } %res
}

define { <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } @ld4.nxv32bf16(<vscale x 8 x i1> %Pg, ptr %addr, i64 %a) #0 {
; CHECK-LABEL: ld4.nxv32bf16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld4h { z0.h - z3.h }, p0/z, [x0, x1, lsl #1]
; CHECK-NEXT:    ret
  %addr2 = getelementptr bfloat, ptr %addr, i64 %a
  %res = call { <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } @llvm.aarch64.sve.ld4.sret.nxv8bf16(<vscale x 8 x i1> %Pg, ptr %addr2)
  ret { <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } %res
}

; ld4w
define { <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32> } @ld4.nxv16i32(<vscale x 4 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld4.nxv16i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld4w { z0.s - z3.s }, p0/z, [x0, x1, lsl #2]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i32, ptr %addr, i64 %a
  %res = call { <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32> } @llvm.aarch64.sve.ld4.sret.nxv4i32(<vscale x 4 x i1> %Pg, ptr %addr2)
  ret { <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32> } %res
}

define { <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float> } @ld4.nxv16f32(<vscale x 4 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld4.nxv16f32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld4w { z0.s - z3.s }, p0/z, [x0, x1, lsl #2]
; CHECK-NEXT:    ret
  %addr2 = getelementptr float, ptr %addr, i64 %a
  %res = call { <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float> } @llvm.aarch64.sve.ld4.sret.nxv4f32(<vscale x 4 x i1> %Pg, ptr %addr2)
  ret { <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float> } %res
}

; ld4d
define { <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64> } @ld4.nxv8i64(<vscale x 2 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld4.nxv8i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld4d { z0.d - z3.d }, p0/z, [x0, x1, lsl #3]
; CHECK-NEXT:    ret
  %addr2 = getelementptr i64, ptr %addr, i64 %a
  %res = call { <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.aarch64.sve.ld4.sret.nxv2i64(<vscale x 2 x i1> %Pg, ptr %addr2)
  ret { <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64> } %res
}

define { <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double> } @ld4.nxv8f64(<vscale x 2 x i1> %Pg, ptr %addr, i64 %a) {
; CHECK-LABEL: ld4.nxv8f64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    ld4d { z0.d - z3.d }, p0/z, [x0, x1, lsl #3]
; CHECK-NEXT:    ret
  %addr2 = getelementptr double, ptr %addr, i64 %a
  %res = call { <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double> } @llvm.aarch64.sve.ld4.sret.nxv2f64(<vscale x 2 x i1> %Pg, ptr %addr2)
  ret { <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double> } %res
}

declare { <vscale x 16 x i8>, <vscale x 16 x i8> } @llvm.aarch64.sve.ld2.sret.nxv16i8(<vscale x 16 x i1>, ptr)
declare { <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld2.sret.nxv8i16(<vscale x 8 x i1>, ptr)
declare { <vscale x 4 x i32>, <vscale x 4 x i32> } @llvm.aarch64.sve.ld2.sret.nxv4i32(<vscale x 4 x i1>, ptr)
declare { <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.aarch64.sve.ld2.sret.nxv2i64(<vscale x 2 x i1>, ptr)
declare { <vscale x 8 x half>, <vscale x 8 x half> } @llvm.aarch64.sve.ld2.sret.nxv8f16(<vscale x 8 x i1>, ptr)
declare { <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } @llvm.aarch64.sve.ld2.sret.nxv8bf16(<vscale x 8 x i1>, ptr)
declare { <vscale x 4 x float>, <vscale x 4 x float> } @llvm.aarch64.sve.ld2.sret.nxv4f32(<vscale x 4 x i1>, ptr)
declare { <vscale x 2 x double>, <vscale x 2 x double> } @llvm.aarch64.sve.ld2.sret.nxv2f64(<vscale x 2 x i1>, ptr)

declare { <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8> } @llvm.aarch64.sve.ld3.sret.nxv16i8(<vscale x 16 x i1>, ptr)
declare { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld3.sret.nxv8i16(<vscale x 8 x i1>, ptr)
declare { <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32> } @llvm.aarch64.sve.ld3.sret.nxv4i32(<vscale x 4 x i1>, ptr)
declare { <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.aarch64.sve.ld3.sret.nxv2i64(<vscale x 2 x i1>, ptr)
declare { <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half> } @llvm.aarch64.sve.ld3.sret.nxv8f16(<vscale x 8 x i1>, ptr)
declare { <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } @llvm.aarch64.sve.ld3.sret.nxv8bf16(<vscale x 8 x i1>, ptr)
declare { <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float> } @llvm.aarch64.sve.ld3.sret.nxv4f32(<vscale x 4 x i1>, ptr)
declare { <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double> } @llvm.aarch64.sve.ld3.sret.nxv2f64(<vscale x 2 x i1>, ptr)

declare { <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8>, <vscale x 16 x i8> } @llvm.aarch64.sve.ld4.sret.nxv16i8(<vscale x 16 x i1>, ptr)
declare { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld4.sret.nxv8i16(<vscale x 8 x i1>, ptr)
declare { <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32> } @llvm.aarch64.sve.ld4.sret.nxv4i32(<vscale x 4 x i1>, ptr)
declare { <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.aarch64.sve.ld4.sret.nxv2i64(<vscale x 2 x i1>, ptr)
declare { <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half>, <vscale x 8 x half> } @llvm.aarch64.sve.ld4.sret.nxv8f16(<vscale x 8 x i1>, ptr)
declare { <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat> } @llvm.aarch64.sve.ld4.sret.nxv8bf16(<vscale x 8 x i1>, ptr)
declare { <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float> } @llvm.aarch64.sve.ld4.sret.nxv4f32(<vscale x 4 x i1>, ptr)
declare { <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double>, <vscale x 2 x double> } @llvm.aarch64.sve.ld4.sret.nxv2f64(<vscale x 2 x i1>, ptr)

; +bf16 is required for the bfloat version.
attributes #0 = { "target-features"="+bf16" }