; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve2p1 -mattr=+b16b16 < %s | FileCheck %s

; Replace pattern min(max(v1,v2),v3) by clamp

define <vscale x 16 x i8> @uclampi8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: uclampi8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uclamp z0.b, z1.b, z2.b
; CHECK-NEXT:    ret
  %min = tail call <vscale x 16 x i8> @llvm.umax.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  %res = tail call <vscale x 16 x i8> @llvm.umin.nxv16i8(<vscale x 16 x i8> %min, <vscale x 16 x i8> %c)
  ret <vscale x 16 x i8> %res
}

define <vscale x 8 x i16> @uclampi16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c) {
; CHECK-LABEL: uclampi16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uclamp z0.h, z1.h, z2.h
; CHECK-NEXT:    ret
  %min = tail call <vscale x 8 x i16> @llvm.umax.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  %res = tail call <vscale x 8 x i16> @llvm.umin.nxv8i16(<vscale x 8 x i16> %min, <vscale x 8 x i16> %c)
  ret <vscale x 8 x i16> %res
}

define <vscale x 4 x i32> @uclampi32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b, <vscale x 4 x i32> %c) {
; CHECK-LABEL: uclampi32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uclamp z0.s, z1.s, z2.s
; CHECK-NEXT:    ret
  %min = tail call <vscale x 4 x i32> @llvm.umax.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  %res = tail call <vscale x 4 x i32> @llvm.umin.nxv4i32(<vscale x 4 x i32> %min, <vscale x 4 x i32> %c)
  ret <vscale x 4 x i32> %res
}

define <vscale x 2 x i64> @uclampi64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i64> %c) {
; CHECK-LABEL: uclampi64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    uclamp z0.d, z1.d, z2.d
; CHECK-NEXT:    ret
  %min = tail call <vscale x 2 x i64> @llvm.umax.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  %res = tail call <vscale x 2 x i64> @llvm.umin.nxv2i64(<vscale x 2 x i64> %min, <vscale x 2 x i64> %c)
  ret <vscale x 2 x i64> %res
}

define <vscale x 16 x i8> @sclampi8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: sclampi8:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sclamp z0.b, z1.b, z2.b
; CHECK-NEXT:    ret
  %min = tail call <vscale x 16 x i8> @llvm.smax.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  %res = tail call <vscale x 16 x i8> @llvm.smin.nxv16i8(<vscale x 16 x i8> %min, <vscale x 16 x i8> %c)
  ret <vscale x 16 x i8> %res
}

define <vscale x 8 x i16> @sclampi16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c) {
; CHECK-LABEL: sclampi16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sclamp z0.h, z1.h, z2.h
; CHECK-NEXT:    ret
  %min = tail call <vscale x 8 x i16> @llvm.smax.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  %res = tail call <vscale x 8 x i16> @llvm.smin.nxv8i16(<vscale x 8 x i16> %min, <vscale x 8 x i16> %c)
  ret <vscale x 8 x i16> %res
}

define <vscale x 4 x i32> @sclampi32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b, <vscale x 4 x i32> %c) {
; CHECK-LABEL: sclampi32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sclamp z0.s, z1.s, z2.s
; CHECK-NEXT:    ret
  %min = tail call <vscale x 4 x i32> @llvm.smax.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  %res = tail call <vscale x 4 x i32> @llvm.smin.nxv4i32(<vscale x 4 x i32> %min, <vscale x 4 x i32> %c)
  ret <vscale x 4 x i32> %res
}

define <vscale x 2 x i64> @sclampi64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i64> %c) {
; CHECK-LABEL: sclampi64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    sclamp z0.d, z1.d, z2.d
; CHECK-NEXT:    ret
  %min = tail call <vscale x 2 x i64> @llvm.smax.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  %res = tail call <vscale x 2 x i64> @llvm.smin.nxv2i64(<vscale x 2 x i64> %min, <vscale x 2 x i64> %c)
  ret <vscale x 2 x i64> %res
}

define <vscale x 8 x bfloat> @fclampbf16(<vscale x 8 x bfloat> %a, <vscale x 8 x bfloat> %b, <vscale x 8 x bfloat> %c) {
; CHECK-LABEL: fclampbf16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    bfclamp z0.h, z1.h, z2.h
; CHECK-NEXT:    ret
  %min = tail call <vscale x 8 x bfloat> @llvm.aarch64.sve.fmaxnm.u.nxv8bf16(<vscale x 8 x i1> splat (i1 true), <vscale x 8 x bfloat> %a, <vscale x 8 x bfloat> %b)
  %res = tail call <vscale x 8 x bfloat> @llvm.aarch64.sve.fminnm.u.nxv8bf16(<vscale x 8 x i1> splat (i1 true), <vscale x 8 x bfloat> %min, <vscale x 8 x bfloat> %c)
  ret <vscale x 8 x bfloat> %res
}

define <vscale x 8 x half> @fclampf16(<vscale x 8 x half> %a, <vscale x 8 x half> %b, <vscale x 8 x half> %c) {
; CHECK-LABEL: fclampf16:
; CHECK:       // %bb.0:
; CHECK-NEXT:    fclamp z0.h, z1.h, z2.h
; CHECK-NEXT:    ret
  %min = call <vscale x 8 x half> @llvm.maxnum.nxv8f16(<vscale x 8 x half> %a, <vscale x 8 x half> %b)
  %res = call <vscale x 8 x half> @llvm.minnum.nxv8f16(<vscale x 8 x half> %min, <vscale x 8 x half> %c)
  ret <vscale x 8 x half> %res
}

define <vscale x 4 x float> @fclampf32(<vscale x 4 x float> %a, <vscale x 4 x float> %b, <vscale x 4 x float> %c) {
; CHECK-LABEL: fclampf32:
; CHECK:       // %bb.0:
; CHECK-NEXT:    fclamp z0.s, z1.s, z2.s
; CHECK-NEXT:    ret
  %min = tail call <vscale x 4 x float> @llvm.maxnum.nxv4f32(<vscale x 4 x float> %a, <vscale x 4 x float> %b)
  %res = tail call <vscale x 4 x float> @llvm.minnum.nxv4f32(<vscale x 4 x float> %min, <vscale x 4 x float> %c)
  ret <vscale x 4 x float> %res
}

define <vscale x 2 x double> @fclampf64(<vscale x 2 x double> %a, <vscale x 2 x double> %b, <vscale x 2 x double> %c) {
; CHECK-LABEL: fclampf64:
; CHECK:       // %bb.0:
; CHECK-NEXT:    fclamp z0.d, z1.d, z2.d
; CHECK-NEXT:    ret
  %min = tail call <vscale x 2 x double> @llvm.maxnum.nxv2f64(<vscale x 2 x double> %a, <vscale x 2 x double> %b)
  %res = tail call <vscale x 2 x double> @llvm.minnum.nxv2f64(<vscale x 2 x double> %min, <vscale x 2 x double> %c)
  ret <vscale x 2 x double> %res
}

declare <vscale x 16 x i8> @llvm.umax.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 16 x i8> @llvm.umin.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.umax.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 8 x i16> @llvm.umin.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.umax.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 4 x i32> @llvm.umin.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.umax.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 2 x i64> @llvm.umin.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 16 x i8> @llvm.smax.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 16 x i8> @llvm.smin.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.smax.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 8 x i16> @llvm.smin.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.smax.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 4 x i32> @llvm.smin.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.smax.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 2 x i64> @llvm.smin.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 8 x bfloat> @llvm.aarch64.sve.fmaxnm.u.nxv8bf16(<vscale x 8 x i1>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat>)
declare <vscale x 8 x bfloat> @llvm.aarch64.sve.fminnm.u.nxv8bf16(<vscale x 8 x i1>, <vscale x 8 x bfloat>, <vscale x 8 x bfloat>)
declare <vscale x 8 x half> @llvm.maxnum.nxv8f16(<vscale x 8 x half>, <vscale x 8 x half>)
declare <vscale x 8 x half> @llvm.minnum.nxv8f16(<vscale x 8 x half>, <vscale x 8 x half>)
declare <vscale x 4 x float> @llvm.maxnum.nxv4f32(<vscale x 4 x float>, <vscale x 4 x float>)
declare <vscale x 4 x float> @llvm.minnum.nxv4f32(<vscale x 4 x float>, <vscale x 4 x float>)
declare <vscale x 2 x double> @llvm.maxnum.nxv2f64(<vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 2 x double> @llvm.minnum.nxv2f64(<vscale x 2 x double>, <vscale x 2 x double>)