; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sme2 -verify-machineinstrs < %s | FileCheck %s

;
; FCVT
;

define <vscale x 8 x half> @multi_vector_cvt_x2_f16(<vscale x 8 x half> %unused, <vscale x 4 x float> %zn1, <vscale x 4 x float> %zn2) {
; CHECK-LABEL: multi_vector_cvt_x2_f16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov z3.d, z2.d
; CHECK-NEXT:    mov z2.d, z1.d
; CHECK-NEXT:    fcvt z0.h, { z2.s, z3.s }
; CHECK-NEXT:    ret
  %res = call <vscale x 8 x half> @llvm.aarch64.sve.fcvt.x2.nxv4f32(<vscale x 4 x float> %zn1, <vscale x 4 x float> %zn2)
  ret <vscale x 8 x half> %res
}

;
; BFCVT
;

define <vscale x 8 x bfloat> @multi_vector_cvt_x2_bf16(<vscale x 8 x bfloat> %unused, <vscale x 4 x float> %zn1, <vscale x 4 x float> %zn2) {
; CHECK-LABEL: multi_vector_cvt_x2_bf16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov z3.d, z2.d
; CHECK-NEXT:    mov z2.d, z1.d
; CHECK-NEXT:    bfcvt z0.h, { z2.s, z3.s }
; CHECK-NEXT:    ret
  %res = call <vscale x 8 x bfloat> @llvm.aarch64.sve.bfcvt.x2(<vscale x 4 x float> %zn1, <vscale x 4 x float> %zn2)
  ret <vscale x 8 x bfloat> %res
}

;
; FCVTZS
;

define {<vscale x 4 x i32>, <vscale x 4 x i32>} @multi_vector_cvt_x2_s32_f32(<vscale x 4 x float> %unused, <vscale x 4 x float> %zn0, <vscale x 4 x float> %zn1) {
; CHECK-LABEL: multi_vector_cvt_x2_s32_f32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov z3.d, z2.d
; CHECK-NEXT:    mov z2.d, z1.d
; CHECK-NEXT:    fcvtzs { z0.s, z1.s }, { z2.s, z3.s }
; CHECK-NEXT:    ret
  %res = call {<vscale x 4 x i32>, <vscale x 4 x i32>} @llvm.aarch64.sve.fcvtzs.x2.nxv4i32.nxv4f32(<vscale x 4 x float> %zn0, <vscale x 4 x float> %zn1)
  ret {<vscale x 4 x i32>, <vscale x 4 x i32>} %res
}

define {<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>} @multi_vector_cvt_x4_s32_f32(<vscale x 4 x float> %unused, <vscale x 4 x float> %zn0, <vscale x 4 x float> %zn1, <vscale x 4 x float> %zn2, <vscale x 4 x float> %zn3) {
; CHECK-LABEL: multi_vector_cvt_x4_s32_f32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov z7.d, z4.d
; CHECK-NEXT:    mov z6.d, z3.d
; CHECK-NEXT:    mov z5.d, z2.d
; CHECK-NEXT:    mov z4.d, z1.d
; CHECK-NEXT:    fcvtzs { z0.s - z3.s }, { z4.s - z7.s }
; CHECK-NEXT:    ret
  %res = call {<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>} @llvm.aarch64.sve.fcvtzs.x4.nxv4i32.nxv4f32(<vscale x 4 x float> %zn0, <vscale x 4 x float> %zn1, <vscale x 4 x float> %zn2, <vscale x 4 x float> %zn3)
  ret {<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>} %res
}

;
; FCVTZU
;

define {<vscale x 4 x i32>, <vscale x 4 x i32>} @multi_vector_cvt_x2_u32_f32(<vscale x 4 x float> %unused, <vscale x 4 x float> %zn0, <vscale x 4 x float> %zn1) {
; CHECK-LABEL: multi_vector_cvt_x2_u32_f32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov z3.d, z2.d
; CHECK-NEXT:    mov z2.d, z1.d
; CHECK-NEXT:    fcvtzu { z0.s, z1.s }, { z2.s, z3.s }
; CHECK-NEXT:    ret
  %res = call {<vscale x 4 x i32>, <vscale x 4 x i32>} @llvm.aarch64.sve.fcvtzu.x2.nxv4i32.nxv4f32(<vscale x 4 x float> %zn0, <vscale x 4 x float> %zn1)
  ret {<vscale x 4 x i32>, <vscale x 4 x i32>} %res
}

define {<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>} @multi_vector_cvt_x4_u32_f32(<vscale x 4 x float> %unused, <vscale x 4 x float> %zn0, <vscale x 4 x float> %zn1, <vscale x 4 x float> %zn2, <vscale x 4 x float> %zn3) {
; CHECK-LABEL: multi_vector_cvt_x4_u32_f32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov z7.d, z4.d
; CHECK-NEXT:    mov z6.d, z3.d
; CHECK-NEXT:    mov z5.d, z2.d
; CHECK-NEXT:    mov z4.d, z1.d
; CHECK-NEXT:    fcvtzu { z0.s - z3.s }, { z4.s - z7.s }
; CHECK-NEXT:    ret
  %res = call {<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>} @llvm.aarch64.sve.fcvtzu.x4.nxv4i32.nxv4f32(<vscale x 4 x float> %zn0, <vscale x 4 x float> %zn1, <vscale x 4 x float> %zn2, <vscale x 4 x float> %zn3)
  ret {<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>} %res
}

;
; SCVTF
;

define {<vscale x 4 x float>, <vscale x 4 x float>} @multi_vector_cvt_x2_f32_s32(<vscale x 4 x i32> %unused, <vscale x 4 x i32> %zn0, <vscale x 4 x i32> %zn1) {
; CHECK-LABEL: multi_vector_cvt_x2_f32_s32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov z3.d, z2.d
; CHECK-NEXT:    mov z2.d, z1.d
; CHECK-NEXT:    scvtf { z0.s, z1.s }, { z2.s, z3.s }
; CHECK-NEXT:    ret
  %res = call {<vscale x 4 x float>, <vscale x 4 x float>} @llvm.aarch64.sve.scvtf.x2.nxv4f32.nxv4i32(<vscale x 4 x i32> %zn0, <vscale x 4 x i32> %zn1)
  ret {<vscale x 4 x float>, <vscale x 4 x float>} %res
}

define {<vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>} @multi_vector_cvt_x4_f32_s32(<vscale x 4 x i32> %unused, <vscale x 4 x i32> %zn0, <vscale x 4 x i32> %zn1, <vscale x 4 x i32> %zn2, <vscale x 4 x i32> %zn3) {
; CHECK-LABEL: multi_vector_cvt_x4_f32_s32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov z7.d, z4.d
; CHECK-NEXT:    mov z6.d, z3.d
; CHECK-NEXT:    mov z5.d, z2.d
; CHECK-NEXT:    mov z4.d, z1.d
; CHECK-NEXT:    scvtf { z0.s - z3.s }, { z4.s - z7.s }
; CHECK-NEXT:    ret
  %res = call {<vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>} @llvm.aarch64.sve.scvtf.x4.nxv4f32.nxv4i32(<vscale x 4 x i32> %zn0, <vscale x 4 x i32> %zn1, <vscale x 4 x i32> %zn2, <vscale x 4 x i32> %zn3)
  ret {<vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>} %res
}

;
; UCVTF
;

define {<vscale x 4 x float>, <vscale x 4 x float>} @multi_vector_cvt_x2_f32_u32(<vscale x 4 x i32> %unused, <vscale x 4 x i32> %zn0, <vscale x 4 x i32> %zn1) {
; CHECK-LABEL: multi_vector_cvt_x2_f32_u32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov z3.d, z2.d
; CHECK-NEXT:    mov z2.d, z1.d
; CHECK-NEXT:    ucvtf { z0.s, z1.s }, { z2.s, z3.s }
; CHECK-NEXT:    ret
  %res = call {<vscale x 4 x float>, <vscale x 4 x float>} @llvm.aarch64.sve.ucvtf.x2.nxv4f32.nxv4i32(<vscale x 4 x i32> %zn0, <vscale x 4 x i32> %zn1)
  ret {<vscale x 4 x float>, <vscale x 4 x float>} %res
}

define {<vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>} @multi_vector_cvt_x4_f32_u32(<vscale x 4 x i32> %unused, <vscale x 4 x i32> %zn0, <vscale x 4 x i32> %zn1, <vscale x 4 x i32> %zn2, <vscale x 4 x i32> %zn3) {
; CHECK-LABEL: multi_vector_cvt_x4_f32_u32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov z7.d, z4.d
; CHECK-NEXT:    mov z6.d, z3.d
; CHECK-NEXT:    mov z5.d, z2.d
; CHECK-NEXT:    mov z4.d, z1.d
; CHECK-NEXT:    ucvtf { z0.s - z3.s }, { z4.s - z7.s }
; CHECK-NEXT:    ret
  %res = call {<vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>} @llvm.aarch64.sve.ucvtf.x4.nxv4f32.nxv4i32(<vscale x 4 x i32> %zn0, <vscale x 4 x i32> %zn1, <vscale x 4 x i32> %zn2, <vscale x 4 x i32> %zn3)
  ret {<vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>} %res
}

declare <vscale x 8 x half> @llvm.aarch64.sve.fcvt.x2.nxv4f32(<vscale x 4 x float>, <vscale x 4 x float>)
declare <vscale x 8 x bfloat> @llvm.aarch64.sve.bfcvt.x2(<vscale x 4 x float>, <vscale x 4 x float>)
declare {<vscale x 4 x i32>, <vscale x 4 x i32>} @llvm.aarch64.sve.fcvtzs.x2.nxv4i32.nxv4f32(<vscale x 4 x float>, <vscale x 4 x float>)
declare {<vscale x 4 x i32>, <vscale x 4 x i32>} @llvm.aarch64.sve.fcvtzu.x2.nxv4i32.nxv4f32(<vscale x 4 x float>, <vscale x 4 x float>)
declare {<vscale x 4 x float>, <vscale x 4 x float>} @llvm.aarch64.sve.scvtf.x2.nxv4f32.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare {<vscale x 4 x float>, <vscale x 4 x float>} @llvm.aarch64.sve.ucvtf.x2.nxv4f32.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare {<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>} @llvm.aarch64.sve.fcvtzs.x4.nxv4i32.nxv4f32(<vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>)
declare {<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>} @llvm.aarch64.sve.fcvtzu.x4.nxv4i32.nxv4f32(<vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>)
declare {<vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>} @llvm.aarch64.sve.scvtf.x4.nxv4f32.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>)
declare {<vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>, <vscale x 4 x float>} @llvm.aarch64.sve.ucvtf.x4.nxv4f32.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>)