; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=aarch64--linux-gnu -mattr=+sve < %s | FileCheck %s

; PRFB <prfop>, <Pg>, [<Zn>.S{, #<imm>}] -> 32-bit element
define void @llvm_aarch64_sve_prfb_gather_scalar_offset_nx4vi32(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scalar_offset_nx4vi32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    prfb pldl1strm, p0, [z0.s, #7]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 7, i32 1)
  ret void
}

; PRFB <prfop>, <Pg>, [<Zn>.D{, #<imm>}] -> 64-bit element
define void @llvm_aarch64_sve_prfb_gather_scalar_offset_nx2vi64(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scalar_offset_nx2vi64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    prfb pldl1strm, p0, [z0.d, #7]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 7, i32 1)
  ret void
}

; PRFH <prfop>, <Pg>, [<Zn>.S{, #<imm>}] -> 32-bit element
define void @llvm_aarch64_sve_prfh_gather_scalar_offset_nx4vi32(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scalar_offset_nx4vi32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    prfh pldl1strm, p0, [z0.s, #6]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 6, i32 1)
  ret void
}

; PRFH <prfop>, <Pg>, [<Zn>.D{, #<imm>}] -> 64-bit element
define void @llvm_aarch64_sve_prfh_gather_scalar_offset_nx2vi64(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scalar_offset_nx2vi64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    prfh pldl1strm, p0, [z0.d, #6]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 6, i32 1)
  ret void
}

; PRFW <prfop>, <Pg>, [<Zn>.S{, #<imm>}] -> 32-bit element
define void @llvm_aarch64_sve_prfw_gather_scalar_offset_nx4vi32(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scalar_offset_nx4vi32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    prfw pldl1strm, p0, [z0.s, #12]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 12, i32 1)
  ret void
}

; PRFW <prfop>, <Pg>, [<Zn>.D{, #<imm>}] -> 64-bit element
define void @llvm_aarch64_sve_prfw_gather_scalar_offset_nx2vi64(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scalar_offset_nx2vi64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    prfw pldl1strm, p0, [z0.d, #12]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 12, i32 1)
  ret void
}

; PRFD <prfop>, <Pg>, [<Zn>.S{, #<imm>}] -> 32-bit element
define void @llvm_aarch64_sve_prfd_gather_scalar_offset_nx4vi32(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scalar_offset_nx4vi32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    prfd pldl1strm, p0, [z0.s, #16]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 16, i32 1)
  ret void
}

; PRFD <prfop>, <Pg>, [<Zn>.D{, #<imm>}] -> 64-bit element
define void @llvm_aarch64_sve_prfd_gather_scalar_offset_nx2vi64(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scalar_offset_nx2vi64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    prfd pldl1strm, p0, [z0.d, #16]
; CHECK-NEXT:    ret
  call void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 16, i32 1)
  ret void
}

declare void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 %prfop)