; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve2 < %s | FileCheck %s

;
; STNT1B, STNT1W, STNT1H, STNT1D: vector base + scalar offset
;   stnt1b { z0.s }, p0, [z0.s, x0]
;

; STNT1B
define void @stnt1b_s(<vscale x 4 x i32> %data, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %base, i64 %offset) {
; CHECK-LABEL: stnt1b_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    stnt1b { z0.s }, p0, [z1.s, x0]
; CHECK-NEXT:    ret
  %data_trunc = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %data_trunc,
                                                                         <vscale x 4 x i1> %pg,
                                                                         <vscale x 4 x i32> %base,
                                                                         i64 %offset)
  ret void
}

define void @stnt1b_d(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %base, i64 %offset) {
; CHECK-LABEL: stnt1b_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    stnt1b { z0.d }, p0, [z1.d, x0]
; CHECK-NEXT:    ret
  %data_trunc = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> %data_trunc,
                                                                         <vscale x 2 x i1> %pg,
                                                                         <vscale x 2 x i64> %base,
                                                                         i64 %offset)
  ret void
}

; STNT1H
define void @stnt1h_s(<vscale x 4 x i32> %data, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %base, i64 %offset) {
; CHECK-LABEL: stnt1h_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    stnt1h { z0.s }, p0, [z1.s, x0]
; CHECK-NEXT:    ret
  %data_trunc = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %data_trunc,
                                                                          <vscale x 4 x i1> %pg,
                                                                          <vscale x 4 x i32> %base,
                                                                          i64 %offset)
  ret void
}

define void @stnt1h_d(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %base, i64 %offset) {
; CHECK-LABEL: stnt1h_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    stnt1h { z0.d }, p0, [z1.d, x0]
; CHECK-NEXT:    ret
  %data_trunc = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %data_trunc,
                                                                          <vscale x 2 x i1> %pg,
                                                                          <vscale x 2 x i64> %base,
                                                                          i64 %offset)
  ret void
}

; STNT1W
define void @stnt1w_s(<vscale x 4 x i32> %data, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %base, i64 %offset) {
; CHECK-LABEL: stnt1w_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    stnt1w { z0.s }, p0, [z1.s, x0]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data,
                                                                          <vscale x 4 x i1> %pg,
                                                                          <vscale x 4 x i32> %base,
                                                                          i64 %offset)
  ret void
}

define void @stnt1w_f32_s(<vscale x 4 x float> %data, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %base, i64 %offset) {
; CHECK-LABEL: stnt1w_f32_s:
; CHECK:       // %bb.0:
; CHECK-NEXT:    stnt1w { z0.s }, p0, [z1.s, x0]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x float> %data,
                                                                          <vscale x 4 x i1> %pg,
                                                                          <vscale x 4 x i32> %base,
                                                                          i64 %offset)
  ret void
}

define void @stnt1w_d(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %base, i64 %offset) {
; CHECK-LABEL: stnt1w_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    stnt1w { z0.d }, p0, [z1.d, x0]
; CHECK-NEXT:    ret
  %data_trunc = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i32.nxv2i64(<vscale x 2 x i32> %data_trunc,
                                                                          <vscale x 2 x i1> %pg,
                                                                          <vscale x 2 x i64> %base,
                                                                          i64 %offset)
  ret void
}

; STNT1D
define void @stnt1d_d(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %base, i64 %offset) {
; CHECK-LABEL: stnt1d_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    stnt1d { z0.d }, p0, [z1.d, x0]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data,
                                                                          <vscale x 2 x i1> %pg,
                                                                          <vscale x 2 x i64> %base,
                                                                          i64 %offset)
  ret void
}

define void @stnt1d_f64_d(<vscale x 2 x double> %data, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %base, i64 %offset) {
; CHECK-LABEL: stnt1d_f64_d:
; CHECK:       // %bb.0:
; CHECK-NEXT:    stnt1d { z0.d }, p0, [z1.d, x0]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x double> %data,
                                                                          <vscale x 2 x i1> %pg,
                                                                          <vscale x 2 x i64> %base,
                                                                          i64 %offset)
  ret void
}

; STNT1B
declare void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8>, <vscale x 2 x i1>, <vscale x 2 x i64>, i64)
declare void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8>, <vscale x 4 x i1>, <vscale x 4 x i32>, i64)

; STNT1H
declare void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16>, <vscale x 2 x i1>, <vscale x 2 x i64>, i64)
declare void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16>, <vscale x 4 x i1>, <vscale x 4 x i32>, i64)

; STNT1W
declare void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i32.nxv2i64(<vscale x 2 x i32>, <vscale x 2 x i1>, <vscale x 2 x i64>, i64)
declare void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>, i64)
declare void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x float>, <vscale x 4 x i1>, <vscale x 4 x i32>, i64)

; STNT1D
declare void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>, i64)
declare void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2f32.nxv2i64(<vscale x 2 x float>, <vscale x 2 x i1>, <vscale x 2 x i64>, i64)
declare void @llvm.aarch64.sve.stnt1.scatter.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x double>, <vscale x 2 x i1>, <vscale x 2 x i64>, i64)
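; For reference, a minimal C-level sketch (an illustrative assumption, not part
; of the autogenerated test, kept in comments so the file stays valid IR): the
; nxv4i32 case exercised by @stnt1w_s above roughly corresponds to the SVE2 ACLE
; non-temporal scatter intrinsic from arm_sve.h; the wrapper name store_nt is
; hypothetical.
;
;   #include <arm_sve.h>
;   // Non-temporal scatter of 32-bit elements to 32-bit vector base addresses
;   // plus a single scalar byte offset (requires __ARM_FEATURE_SVE2).
;   void store_nt(svbool_t pg, svuint32_t bases, int64_t offset, svint32_t data) {
;     svstnt1_scatter_u32base_offset_s32(pg, bases, offset, data);
;   }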