# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - \
# RUN: | FileCheck %s --check-prefix=RV64I
# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
# RUN: | FileCheck %s --check-prefix=RV64ZBB
---
# s8 G_ABS: base RV64I legalizes via sext-in-s32 (shl/ashr by 24) then the
# add/xor abs idiom and a zext mask back to 8 bits; Zbb legalizes to neg+smax.
name: abs_i8
body: |
  bb.0.entry:
    ; RV64I-LABEL: name: abs_i8
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 8
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_ZEXT]](s64)
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
    ; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[TRUNC]], [[C]](s64)
    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
    ; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C1]](s64)
    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 7
    ; RV64I-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[ASHR]], [[C2]](s64)
    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_ZEXT]](s64)
    ; RV64I-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[TRUNC1]], [[ASHR1]]
    ; RV64I-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[ADD]], [[ASHR1]]
    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[XOR]](s32)
    ; RV64I-NEXT: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C3]]
    ; RV64I-NEXT: $x10 = COPY [[AND]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: abs_i8
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 8
    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
    ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[ASSERT_ZEXT]], [[C]](s64)
    ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[C1]], [[ASHR]]
    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASHR]], [[SUB]]
    ; RV64ZBB-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[SMAX]], [[C2]]
    ; RV64ZBB-NEXT: $x10 = COPY [[AND]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %1:_(s64) = COPY $x10
    %2:_(s64) = G_ASSERT_ZEXT %1, 8
    %0:_(s8) = G_TRUNC %2(s64)
    %3:_(s8) = G_ABS %0
    %4:_(s64) = G_ZEXT %3(s8)
    $x10 = COPY %4(s64)
    PseudoRET implicit $x10
...
---
# s16 G_ABS: base RV64I legalizes via sext-in-s32 (shl/ashr by 16), the
# add/xor abs idiom, then an s64 sext (shl/ashr by 48); Zbb uses neg+smax.
name: abs_i16
body: |
  bb.0.entry:
    ; RV64I-LABEL: name: abs_i16
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 16
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_SEXT]](s64)
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[TRUNC]], [[C]](s64)
    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C1]](s64)
    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 15
    ; RV64I-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[ASHR]], [[C2]](s64)
    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_SEXT]](s64)
    ; RV64I-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[TRUNC1]], [[ASHR1]]
    ; RV64I-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[ADD]], [[ASHR1]]
    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[XOR]](s32)
    ; RV64I-NEXT: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
    ; RV64I-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[ANYEXT]], [[C3]](s64)
    ; RV64I-NEXT: [[ASHR2:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C3]](s64)
    ; RV64I-NEXT: $x10 = COPY [[ASHR2]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: abs_i16
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 16
    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
    ; RV64ZBB-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[ASSERT_SEXT]], [[C]](s64)
    ; RV64ZBB-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
    ; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[C1]], [[ASHR]]
    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASHR]], [[SUB]]
    ; RV64ZBB-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
    ; RV64ZBB-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[SMAX]], [[C2]](s64)
    ; RV64ZBB-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL1]], [[C2]](s64)
    ; RV64ZBB-NEXT: $x10 = COPY [[ASHR1]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %1:_(s64) = COPY $x10
    %2:_(s64) = G_ASSERT_SEXT %1, 16
    %0:_(s16) = G_TRUNC %2(s64)
    %3:_(s16) = G_ABS %0
    %4:_(s64) = G_SEXT %3(s16)
    $x10 = COPY %4(s64)
    PseudoRET implicit $x10
...
---
# s32 G_ABS: base RV64I uses ashr-by-31 + add/xor in s32 then sexts the
# result; Zbb widens to s64 (sext_inreg) and selects with smax.
name: abs_i32
body: |
  bb.0.entry:
    ; RV64I-LABEL: name: abs_i32
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_SEXT]](s64)
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 31
    ; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[TRUNC]], [[C]](s64)
    ; RV64I-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[TRUNC]], [[ASHR]]
    ; RV64I-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[ADD]], [[ASHR]]
    ; RV64I-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[XOR]](s32)
    ; RV64I-NEXT: $x10 = COPY [[SEXT]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: abs_i32
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32
    ; RV64ZBB-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_SEXT]](s64)
    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[TRUNC]]
    ; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[ASSERT_SEXT]], 32
    ; RV64ZBB-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[SUB]](s32)
    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[SEXT_INREG]], [[SEXT]]
    ; RV64ZBB-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SMAX]], 32
    ; RV64ZBB-NEXT: $x10 = COPY [[SEXT_INREG1]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %1:_(s64) = COPY $x10
    %2:_(s64) = G_ASSERT_SEXT %1, 32
    %0:_(s32) = G_TRUNC %2(s64)
    %3:_(s32) = G_ABS %0
    %4:_(s64) = G_SEXT %3(s32)
    $x10 = COPY %4(s64)
    PseudoRET implicit $x10
...
---
# s64 G_ABS (native width): base RV64I uses ashr-by-63 + add/xor; Zbb
# legalizes directly to neg + smax.
name: abs_i64
body: |
  bb.0.entry:
    ; RV64I-LABEL: name: abs_i64
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[C]](s64)
    ; RV64I-NEXT: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY]], [[ASHR]]
    ; RV64I-NEXT: [[XOR:%[0-9]+]]:_(s64) = G_XOR [[ADD]], [[ASHR]]
    ; RV64I-NEXT: $x10 = COPY [[XOR]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: abs_i64
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[C]], [[COPY]]
    ; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[COPY]], [[SUB]]
    ; RV64ZBB-NEXT: $x10 = COPY [[SMAX]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = G_ABS %0
    $x10 = COPY %1(s64)
    PseudoRET implicit $x10
...