; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve < %s | FileCheck %s

;
; ABS
;

define <vscale x 16 x i8> @abs_i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b) {
; CHECK-LABEL: abs_i8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    abs z0.b, p0/m, z1.b
; CHECK-NEXT:    ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.abs.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @abs_i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b) {
; CHECK-LABEL: abs_i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    abs z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.abs.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @abs_i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b) {
; CHECK-LABEL: abs_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    abs z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.abs.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @abs_i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b) {
; CHECK-LABEL: abs_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    abs z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.abs.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

;
; NEG
;

define <vscale x 16 x i8> @neg_i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b) {
; CHECK-LABEL: neg_i8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    neg z0.b, p0/m, z1.b
; CHECK-NEXT:    ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.neg.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @neg_i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b) {
; CHECK-LABEL: neg_i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    neg z0.h, p0/m, z1.h
; CHECK-NEXT:    ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.neg.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @neg_i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b) {
; CHECK-LABEL: neg_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    neg z0.s, p0/m, z1.s
; CHECK-NEXT:    ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.neg.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @neg_i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b) {
; CHECK-LABEL: neg_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    neg z0.d, p0/m, z1.d
; CHECK-NEXT:    ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.neg.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

;
; SDOT
;

define <vscale x 4 x i32> @sdot_i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: sdot_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sdot z0.s, z1.b, z2.b
; CHECK-NEXT:    ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sdot.nxv4i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sdot_i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c) {
; CHECK-LABEL: sdot_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sdot z0.d, z1.h, z2.h
; CHECK-NEXT:    ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sdot.nxv2i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c)
  ret <vscale x 2 x i64> %out
}

; A dot product with an all-zero accumulator followed by a vector add should
; fold into a single dot product that accumulates directly into the addend.
define <vscale x 2 x i64> @test_sdot_i64_zero(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c) {
; CHECK-LABEL: test_sdot_i64_zero:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    sdot z0.d, z1.h, z2.h
; CHECK-NEXT:    ret
entry:
  %vdot1.i = call <vscale x 2 x i64> @llvm.aarch64.sve.sdot.nxv2i64(<vscale x 2 x i64> zeroinitializer, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c)
  %ret = add <vscale x 2 x i64> %vdot1.i, %a
  ret <vscale x 2 x i64> %ret
}

define <vscale x 4 x i32> @test_sdot_i32_zero(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: test_sdot_i32_zero:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    sdot z0.s, z1.b, z2.b
; CHECK-NEXT:    ret
entry:
  %vdot1.i = call <vscale x 4 x i32> @llvm.aarch64.sve.sdot.nxv4i32(<vscale x 4 x i32> zeroinitializer, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c)
  %ret = add <vscale x 4 x i32> %vdot1.i, %a
  ret <vscale x 4 x i32> %ret
}

;
; SDOT (Indexed)
;

define <vscale x 4 x i32> @sdot_lane_i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: sdot_lane_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sdot z0.s, z1.b, z2.b[2]
; CHECK-NEXT:    ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sdot.lane.nxv4i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c, i32 2)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sdot_lane_i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c) {
; CHECK-LABEL: sdot_lane_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sdot z0.d, z1.h, z2.h[1]
; CHECK-NEXT:    ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sdot.lane.nxv2i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c, i32 1)
  ret <vscale x 2 x i64> %out
}

;
; SQADD
;

define <vscale x 16 x i8> @sqadd_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: sqadd_i8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sqadd z0.b, z0.b, z1.b
; CHECK-NEXT:    ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.sqadd.x.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @sqadd_i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: sqadd_i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sqadd z0.h, z0.h, z1.h
; CHECK-NEXT:    ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.sqadd.x.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @sqadd_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: sqadd_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sqadd z0.s, z0.s, z1.s
; CHECK-NEXT:    ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sqadd.x.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sqadd_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: sqadd_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sqadd z0.d, z0.d, z1.d
; CHECK-NEXT:    ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sqadd.x.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

;
; SQSUB
;

define <vscale x 16 x i8> @sqsub_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: sqsub_i8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sqsub z0.b, z0.b, z1.b
; CHECK-NEXT:    ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.sqsub.x.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @sqsub_i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: sqsub_i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sqsub z0.h, z0.h, z1.h
; CHECK-NEXT:    ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.sqsub.x.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @sqsub_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: sqsub_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sqsub z0.s, z0.s, z1.s
; CHECK-NEXT:    ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sqsub.x.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sqsub_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: sqsub_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sqsub z0.d, z0.d, z1.d
; CHECK-NEXT:    ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sqsub.x.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

;
; UDOT
;

define <vscale x 4 x i32> @udot_i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: udot_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    udot z0.s, z1.b, z2.b
; CHECK-NEXT:    ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.udot.nxv4i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @udot_i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c) {
; CHECK-LABEL: udot_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    udot z0.d, z1.h, z2.h
; CHECK-NEXT:    ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.udot.nxv2i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c)
  ret <vscale x 2 x i64> %out
}

; As with sdot above, a udot with an all-zero accumulator followed by a vector
; add should fold into a single udot accumulating into the addend.
define <vscale x 2 x i64> @test_udot_i64_zero(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c) {
; CHECK-LABEL: test_udot_i64_zero:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    udot z0.d, z1.h, z2.h
; CHECK-NEXT:    ret
entry:
  %vdot1.i = call <vscale x 2 x i64> @llvm.aarch64.sve.udot.nxv2i64(<vscale x 2 x i64> zeroinitializer, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c)
  %ret = add <vscale x 2 x i64> %vdot1.i, %a
  ret <vscale x 2 x i64> %ret
}

define <vscale x 4 x i32> @test_udot_i32_zero(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: test_udot_i32_zero:
; CHECK:       // %bb.0: // %entry
; CHECK-NEXT:    udot z0.s, z1.b, z2.b
; CHECK-NEXT:    ret
entry:
  %vdot1.i = call <vscale x 4 x i32> @llvm.aarch64.sve.udot.nxv4i32(<vscale x 4 x i32> zeroinitializer, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c)
  %ret = add <vscale x 4 x i32> %vdot1.i, %a
  ret <vscale x 4 x i32> %ret
}

;
; UDOT (Indexed)
;

define <vscale x 4 x i32> @udot_lane_i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: udot_lane_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    udot z0.s, z1.b, z2.b[2]
; CHECK-NEXT:    ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.udot.lane.nxv4i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c, i32 2)
  ret <vscale x 4 x i32> %out
}

;
; UQADD
;

define <vscale x 16 x i8> @uqadd_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: uqadd_i8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uqadd z0.b, z0.b, z1.b
; CHECK-NEXT:    ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.uqadd.x.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @uqadd_i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: uqadd_i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uqadd z0.h, z0.h, z1.h
; CHECK-NEXT:    ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.uqadd.x.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @uqadd_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: uqadd_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uqadd z0.s, z0.s, z1.s
; CHECK-NEXT:    ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.uqadd.x.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @uqadd_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: uqadd_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uqadd z0.d, z0.d, z1.d
; CHECK-NEXT:    ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.uqadd.x.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

;
; UQSUB
;

define <vscale x 16 x i8> @uqsub_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: uqsub_i8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uqsub z0.b, z0.b, z1.b
; CHECK-NEXT:    ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.uqsub.x.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @uqsub_i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: uqsub_i16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uqsub z0.h, z0.h, z1.h
; CHECK-NEXT:    ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.uqsub.x.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @uqsub_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: uqsub_i32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uqsub z0.s, z0.s, z1.s
; CHECK-NEXT:    ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.uqsub.x.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @uqsub_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: uqsub_i64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uqsub z0.d, z0.d, z1.d
; CHECK-NEXT:    ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.uqsub.x.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

declare <vscale x 16 x i8> @llvm.aarch64.sve.abs.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i1>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.abs.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i1>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.abs.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.abs.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)

declare <vscale x 16 x i8> @llvm.aarch64.sve.neg.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i1>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.neg.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i1>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.neg.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.neg.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)

declare <vscale x 4 x i32> @llvm.aarch64.sve.sdot.nxv4i32(<vscale x 4 x i32>, <vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sdot.nxv2i64(<vscale x 2 x i64>, <vscale x 8 x i16>, <vscale x 8 x i16>)

declare <vscale x 4 x i32> @llvm.aarch64.sve.sdot.lane.nxv4i32(<vscale x 4 x i32>, <vscale x 16 x i8>, <vscale x 16 x i8>, i32)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sdot.lane.nxv2i64(<vscale x 2 x i64>, <vscale x 8 x i16>, <vscale x 8 x i16>, i32)

declare <vscale x 16 x i8> @llvm.aarch64.sve.sqadd.x.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.sqadd.x.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.sqadd.x.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sqadd.x.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)

declare <vscale x 16 x i8> @llvm.aarch64.sve.sqsub.x.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.sqsub.x.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.sqsub.x.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sqsub.x.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)

declare <vscale x 4 x i32> @llvm.aarch64.sve.udot.nxv4i32(<vscale x 4 x i32>, <vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.udot.nxv2i64(<vscale x 2 x i64>, <vscale x 8 x i16>, <vscale x 8 x i16>)

declare <vscale x 4 x i32> @llvm.aarch64.sve.udot.lane.nxv4i32(<vscale x 4 x i32>, <vscale x 16 x i8>, <vscale x 16 x i8>, i32)
declare <vscale x 2 x i64> @llvm.aarch64.sve.udot.lane.nxv2i64(<vscale x 2 x i64>, <vscale x 8 x i16>, <vscale x 8 x i16>, i32)

declare <vscale x 16 x i8> @llvm.aarch64.sve.uqadd.x.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.uqadd.x.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.uqadd.x.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.uqadd.x.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)

declare <vscale x 16 x i8> @llvm.aarch64.sve.uqsub.x.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.uqsub.x.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.uqsub.x.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.uqsub.x.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)