; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s

; Tests that a (shl (zext X), splat C) feeding an indexed vector load/store
; intrinsic is handled correctly: the backend may narrow/reassociate the
; extend + shift, and must still select the right vloxei/vluxei/vsoxei/vsuxei
; index EEW (64-bit index here unless the extended type is already narrow).

declare <vscale x 4 x i32> @llvm.riscv.vloxei.nxv4i32.nxv4i64(
  <vscale x 4 x i32>,
  <vscale x 4 x i32>*,
  <vscale x 4 x i64>,
  i64);

define <vscale x 4 x i32> @test_vloxei(<vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, i64 %vl) {
; CHECK-LABEL: test_vloxei:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v8
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vloxei64.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 4, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  %res = call <vscale x 4 x i32> @llvm.riscv.vloxei.nxv4i32.nxv4i64(
    <vscale x 4 x i32> undef,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    i64 %vl)
  ret <vscale x 4 x i32> %res
}

define <vscale x 4 x i32> @test_vloxei2(<vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, i64 %vl) {
; CHECK-LABEL: test_vloxei2:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v8
; CHECK-NEXT:    vsll.vi v12, v12, 14
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vloxei64.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 14, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  %res = call <vscale x 4 x i32> @llvm.riscv.vloxei.nxv4i32.nxv4i64(
    <vscale x 4 x i32> undef,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    i64 %vl)
  ret <vscale x 4 x i32> %res
}

define <vscale x 4 x i32> @test_vloxei3(<vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, i64 %vl) {
; CHECK-LABEL: test_vloxei3:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v8
; CHECK-NEXT:    vsll.vi v12, v12, 26
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vloxei64.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 26, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  %res = call <vscale x 4 x i32> @llvm.riscv.vloxei.nxv4i32.nxv4i64(
    <vscale x 4 x i32> undef,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    i64 %vl)
  ret <vscale x 4 x i32> %res
}

; Test use vp.zext to extend.
declare <vscale x 4 x i64> @llvm.vp.zext.nxvi64.nxv1i8(<vscale x 4 x i8>, <vscale x 4 x i1>, i32)

define <vscale x 4 x i32> @test_vloxei4(<vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, <vscale x 4 x i1> %m, i32 zeroext %vl) {
; CHECK-LABEL: test_vloxei4:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v8, v0.t
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vloxei64.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  %offset.ext = call <vscale x 4 x i64> @llvm.vp.zext.nxvi64.nxv1i8(<vscale x 4 x i8> %offset, <vscale x 4 x i1> %m, i32 %vl)
  %shamt = insertelement <vscale x 4 x i64> undef, i64 4, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  %vl.i64 = zext i32 %vl to i64
  %res = call <vscale x 4 x i32> @llvm.riscv.vloxei.nxv4i32.nxv4i64(
    <vscale x 4 x i32> undef,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    i64 %vl.i64)
  ret <vscale x 4 x i32> %res
}

; Test original extended type is already narrow enough.
declare <vscale x 4 x i32> @llvm.riscv.vloxei.nxv4i32.nxv4i16(
  <vscale x 4 x i32>,
  <vscale x 4 x i32>*,
  <vscale x 4 x i16>,
  i64);

define <vscale x 4 x i32> @test_vloxei5(<vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, i64 %vl) {
; CHECK-LABEL: test_vloxei5:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e16, m1, ta, ma
; CHECK-NEXT:    vzext.vf2 v9, v8
; CHECK-NEXT:    vsll.vi v10, v9, 12
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vloxei16.v v8, (a0), v10
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i16>
  %shamt = insertelement <vscale x 4 x i16> undef, i16 12, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i16> %shamt, <vscale x 4 x i16> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i16> %offset.ext, %shamt.vec
  %res = call <vscale x 4 x i32> @llvm.riscv.vloxei.nxv4i32.nxv4i16(
    <vscale x 4 x i32> undef,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i16> %shl,
    i64 %vl)
  ret <vscale x 4 x i32> %res
}

; Non-byte-sized i7 offset: the vand.vx with 127 masks the zext source.
define <vscale x 4 x i32> @test_vloxei6(<vscale x 4 x i32>* %ptr, <vscale x 4 x i7> %offset, i64 %vl) {
; CHECK-LABEL: test_vloxei6:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    li a2, 127
; CHECK-NEXT:    vsetvli a3, zero, e8, mf2, ta, ma
; CHECK-NEXT:    vand.vx v8, v8, a2
; CHECK-NEXT:    vsetvli zero, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v8
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vloxei64.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i7> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 4, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  %res = call <vscale x 4 x i32> @llvm.riscv.vloxei.nxv4i32.nxv4i64(
    <vscale x 4 x i32> undef,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    i64 %vl)
  ret <vscale x 4 x i32> %res
}

; i1 offset: materialized via vmv.v.i/vmerge.vim before the shift.
define <vscale x 4 x i32> @test_vloxei7(<vscale x 4 x i32>* %ptr, <vscale x 4 x i1> %offset, i64 %vl) {
; CHECK-LABEL: test_vloxei7:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vmv.v.i v8, 0
; CHECK-NEXT:    vmerge.vim v8, v8, 1, v0
; CHECK-NEXT:    vsll.vi v12, v8, 2
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vloxei64.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i1> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 2, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  %res = call <vscale x 4 x i32> @llvm.riscv.vloxei.nxv4i32.nxv4i64(
    <vscale x 4 x i32> undef,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    i64 %vl)
  ret <vscale x 4 x i32> %res
}

declare <vscale x 4 x i32> @llvm.riscv.vloxei.mask.nxv4i32.nxv4i64(
  <vscale x 4 x i32>,
  <vscale x 4 x i32>*,
  <vscale x 4 x i64>,
  <vscale x 4 x i1>,
  i64,
  i64);

define <vscale x 4 x i32> @test_vloxei_mask(<vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, <vscale x 4 x i1> %m, i64 %vl) {
; CHECK-LABEL: test_vloxei_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v8
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vloxei64.v v8, (a0), v12, v0.t
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 4, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  %res = call <vscale x 4 x i32> @llvm.riscv.vloxei.mask.nxv4i32.nxv4i64(
    <vscale x 4 x i32> undef,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    <vscale x 4 x i1> %m,
    i64 %vl,
    i64 1)
  ret <vscale x 4 x i32> %res
}

declare <vscale x 4 x i32> @llvm.riscv.vluxei.nxv4i32.nxv4i64(
  <vscale x 4 x i32>,
  <vscale x 4 x i32>*,
  <vscale x 4 x i64>,
  i64);

define <vscale x 4 x i32> @test_vluxei(<vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, i64 %vl) {
; CHECK-LABEL: test_vluxei:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v8
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vluxei64.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 4, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  %res = call <vscale x 4 x i32> @llvm.riscv.vluxei.nxv4i32.nxv4i64(
    <vscale x 4 x i32> undef,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    i64 %vl)
  ret <vscale x 4 x i32> %res
}

declare <vscale x 4 x i32> @llvm.riscv.vluxei.mask.nxv4i32.nxv4i64(
  <vscale x 4 x i32>,
  <vscale x 4 x i32>*,
  <vscale x 4 x i64>,
  <vscale x 4 x i1>,
  i64,
  i64);

define <vscale x 4 x i32> @test_vluxei_mask(<vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, <vscale x 4 x i1> %m, i64 %vl) {
; CHECK-LABEL: test_vluxei_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v8
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vluxei64.v v8, (a0), v12, v0.t
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 4, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  %res = call <vscale x 4 x i32> @llvm.riscv.vluxei.mask.nxv4i32.nxv4i64(
    <vscale x 4 x i32> undef,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    <vscale x 4 x i1> %m,
    i64 %vl,
    i64 1)
  ret <vscale x 4 x i32> %res
}

declare void @llvm.riscv.vsoxei.nxv4i32.nxv4i64(
  <vscale x 4 x i32>,
  <vscale x 4 x i32>*,
  <vscale x 4 x i64>,
  i64);

define void @test_vsoxei(<vscale x 4 x i32> %val, <vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, i64 %vl) {
; CHECK-LABEL: test_vsoxei:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v10
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vsoxei64.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 4, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64(
    <vscale x 4 x i32> %val,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    i64 %vl)
  ret void
}

declare void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64(
  <vscale x 4 x i32>,
  <vscale x 4 x i32>*,
  <vscale x 4 x i64>,
  <vscale x 4 x i1>,
  i64);

define void @test_vsoxei_mask(<vscale x 4 x i32> %val, <vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, <vscale x 4 x i1> %m, i64 %vl) {
; CHECK-LABEL: test_vsoxei_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v10
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vsoxei64.v v8, (a0), v12, v0.t
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 4, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64(
    <vscale x 4 x i32> %val,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    <vscale x 4 x i1> %m,
    i64 %vl)
  ret void
}

declare void @llvm.riscv.vsuxei.nxv4i32.nxv4i64(
  <vscale x 4 x i32>,
  <vscale x 4 x i32>*,
  <vscale x 4 x i64>,
  i64);

define void @test_vsuxei(<vscale x 4 x i32> %val, <vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, i64 %vl) {
; CHECK-LABEL: test_vsuxei:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v10
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vsuxei64.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 4, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  call void @llvm.riscv.vsuxei.nxv4i32.nxv4i64(
    <vscale x 4 x i32> %val,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    i64 %vl)
  ret void
}

declare void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i64(
  <vscale x 4 x i32>,
  <vscale x 4 x i32>*,
  <vscale x 4 x i64>,
  <vscale x 4 x i1>,
  i64);

define void @test_vsuxei_mask(<vscale x 4 x i32> %val, <vscale x 4 x i32>* %ptr, <vscale x 4 x i8> %offset, <vscale x 4 x i1> %m, i64 %vl) {
; CHECK-LABEL: test_vsuxei_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, zero, e64, m4, ta, ma
; CHECK-NEXT:    vzext.vf8 v12, v10
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vsuxei64.v v8, (a0), v12, v0.t
; CHECK-NEXT:    ret
entry:
  %offset.ext = zext <vscale x 4 x i8> %offset to <vscale x 4 x i64>
  %shamt = insertelement <vscale x 4 x i64> undef, i64 4, i32 0
  %shamt.vec = shufflevector <vscale x 4 x i64> %shamt, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %shl = shl <vscale x 4 x i64> %offset.ext, %shamt.vec
  call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i64(
    <vscale x 4 x i32> %val,
    <vscale x 4 x i32>* %ptr,
    <vscale x 4 x i64> %shl,
    <vscale x 4 x i1> %m,
    i64 %vl)
  ret void
}