; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv64 -mattr=+zve32x \
; RUN:   -verify-machineinstrs < %s | FileCheck %s

; Make sure we don't select a 0 vl to X0 in the custom isel handlers we use
; for these intrinsics: an X0 AVL operand means VLMAX, so a zero vl has to be
; kept as the immediate 0 in vsetivli.

declare {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlseg2.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, i64)
declare {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlseg2.mask.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i1>, i64, i64)

define <vscale x 16 x i16> @test_vlseg2_mask_nxv16i16(ptr %base, <vscale x 16 x i1> %mask) {
; CHECK-LABEL: test_vlseg2_mask_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
; CHECK-NEXT:    vlseg2e16.v v4, (a0)
; CHECK-NEXT:    vmv4r.v v8, v4
; CHECK-NEXT:    vlseg2e16.v v4, (a0), v0.t
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlseg2.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, ptr %base, i64 0)
  %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %0, 0
  %2 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlseg2.mask.nxv16i16(<vscale x 16 x i16> %1, <vscale x 16 x i16> %1, ptr %base, <vscale x 16 x i1> %mask, i64 0, i64 1)
  %3 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %2, 1
  ret <vscale x 16 x i16> %3
}

declare {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlsseg2.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, i64, i64)
declare {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlsseg2.mask.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, i64, <vscale x 16 x i1>, i64, i64)

define <vscale x 16 x i16> @test_vlsseg2_mask_nxv16i16(ptr %base, i64 %offset, <vscale x 16 x i1> %mask) {
; CHECK-LABEL: test_vlsseg2_mask_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
; CHECK-NEXT:    vlsseg2e16.v v4, (a0), a1
; CHECK-NEXT:    vmv4r.v v8, v4
; CHECK-NEXT:    vlsseg2e16.v v4, (a0), a1, v0.t
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlsseg2.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, ptr %base, i64 %offset, i64 0)
  %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %0, 0
  %2 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlsseg2.mask.nxv16i16(<vscale x 16 x i16> %1, <vscale x 16 x i16> %1, ptr %base, i64 %offset, <vscale x 16 x i1> %mask, i64 0, i64 1)
  %3 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %2, 1
  ret <vscale x 16 x i16> %3
}

declare {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vloxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i16>, i64)
declare {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vloxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i16>, <vscale x 16 x i1>, i64, i64)

define <vscale x 16 x i16> @test_vloxseg2_mask_nxv16i16_nxv16i16(ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask) {
; CHECK-LABEL: test_vloxseg2_mask_nxv16i16_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
; CHECK-NEXT:    vloxseg2ei16.v v12, (a0), v8
; CHECK-NEXT:    vmv4r.v v16, v12
; CHECK-NEXT:    vloxseg2ei16.v v12, (a0), v8, v0.t
; CHECK-NEXT:    vmv4r.v v8, v16
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vloxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, ptr %base, <vscale x 16 x i16> %index, i64 0)
  %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %0, 0
  %2 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vloxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16> %1, <vscale x 16 x i16> %1, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 0, i64 1)
  %3 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %2, 1
  ret <vscale x 16 x i16> %3
}

declare {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vluxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i16>, i64)
declare {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vluxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i16>, <vscale x 16 x i1>, i64, i64)

define <vscale x 16 x i16> @test_vluxseg2_mask_nxv16i16_nxv16i16(ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask) {
; CHECK-LABEL: test_vluxseg2_mask_nxv16i16_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
; CHECK-NEXT:    vluxseg2ei16.v v12, (a0), v8
; CHECK-NEXT:    vmv4r.v v16, v12
; CHECK-NEXT:    vluxseg2ei16.v v12, (a0), v8, v0.t
; CHECK-NEXT:    vmv4r.v v8, v16
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vluxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, ptr %base, <vscale x 16 x i16> %index, i64 0)
  %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %0, 0
  %2 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vluxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16> %1, <vscale x 16 x i16> %1, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 0, i64 1)
  %3 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %2, 1
  ret <vscale x 16 x i16> %3
}

declare {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, i64)
declare {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i1>, i64, i64)

define <vscale x 16 x i16> @test_vlseg2ff_nxv16i16(ptr %base, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, ma
; CHECK-NEXT:    vlseg2e16ff.v v4, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a1)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, ptr %base, i64 0)
  %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 16 x i16> %1
}

define <vscale x 16 x i16> @test_vlseg2ff_mask_nxv16i16(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_mask_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v4, v8
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, mu
; CHECK-NEXT:    vlseg2e16ff.v v4, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a1)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i1> %mask, i64 0, i64 1)
  %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 16 x i16> %1
}

declare void @llvm.riscv.vsseg2.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, i64)
declare void @llvm.riscv.vsseg2.mask.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i1>, i64)

define void @test_vsseg2_nxv16i16(<vscale x 16 x i16> %val, ptr %base) {
; CHECK-LABEL: test_vsseg2_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, ma
; CHECK-NEXT:    vsseg2e16.v v8, (a0)
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsseg2.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, ptr %base, i64 0)
  ret void
}

define void @test_vsseg2_mask_nxv16i16(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i1> %mask) {
; CHECK-LABEL: test_vsseg2_mask_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, ma
; CHECK-NEXT:    vsseg2e16.v v8, (a0), v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsseg2.mask.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i1> %mask, i64 0)
  ret void
}

declare void @llvm.riscv.vssseg2.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, i64, i64)
declare void @llvm.riscv.vssseg2.mask.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, i64, <vscale x 16 x i1>, i64)

define void @test_vssseg2_nxv16i16(<vscale x 16 x i16> %val, ptr %base, i64 %offset) {
; CHECK-LABEL: test_vssseg2_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, ma
; CHECK-NEXT:    vssseg2e16.v v8, (a0), a1
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vssseg2.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, ptr %base, i64 %offset, i64 0)
  ret void
}

define void @test_vssseg2_mask_nxv16i16(<vscale x 16 x i16> %val, ptr %base, i64 %offset, <vscale x 16 x i1> %mask) {
; CHECK-LABEL: test_vssseg2_mask_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, ma
; CHECK-NEXT:    vssseg2e16.v v8, (a0), a1, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vssseg2.mask.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, ptr %base, i64 %offset, <vscale x 16 x i1> %mask, i64 0)
  ret void
}

declare void @llvm.riscv.vsoxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i16>, i64)
declare void @llvm.riscv.vsoxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i16>, <vscale x 16 x i1>, i64)

define void @test_vsoxseg2_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index) {
; CHECK-LABEL: test_vsoxseg2_nxv16i16_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v16, v12
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, ma
; CHECK-NEXT:    vsoxseg2ei16.v v8, (a0), v16
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsoxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index, i64 0)
  ret void
}

define void @test_vsoxseg2_mask_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask) {
; CHECK-LABEL: test_vsoxseg2_mask_nxv16i16_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v16, v12
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, ma
; CHECK-NEXT:    vsoxseg2ei16.v v8, (a0), v16, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsoxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 0)
  ret void
}

declare void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i16>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i16>, <vscale x 16 x i1>, i64)

define void @test_vsuxseg2_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index) {
; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v16, v12
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, ma
; CHECK-NEXT:    vsuxseg2ei16.v v8, (a0), v16
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index, i64 0)
  ret void
}

define void @test_vsuxseg2_mask_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v16, v12
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetivli zero, 0, e16, m4, ta, ma
; CHECK-NEXT:    vsuxseg2ei16.v v8, (a0), v16, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16> %val, <vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 0)
  ret void
}