# RUN: llvm-mc -triple=amdgcn -mcpu=gfx900 -disassemble -show-encoding < %s | FileCheck -strict-whitespace %s

# CHECK: v_cndmask_b32_e32 v5, v1, v2, vcc ; encoding: [0x01,0x05,0x0a,0x00]
0x01,0x05,0x0a,0x00

# CHECK: v_cndmask_b32_e32 v255, v1, v2, vcc ; encoding: [0x01,0x05,0xfe,0x01]
0x01,0x05,0xfe,0x01

# CHECK: v_cndmask_b32_e32 v5, v255, v2, vcc ; encoding: [0xff,0x05,0x0a,0x00]
0xff,0x05,0x0a,0x00

# CHECK: v_cndmask_b32_e32 v5, 0, v2, vcc ; encoding: [0x80,0x04,0x0a,0x00]
0x80,0x04,0x0a,0x00

# CHECK: v_cndmask_b32_e32 v5, -1, v2, vcc ; encoding: [0xc1,0x04,0x0a,0x00]
0xc1,0x04,0x0a,0x00

# CHECK: v_cndmask_b32_e32 v5, 0.5, v2, vcc ; encoding: [0xf0,0x04,0x0a,0x00]
0xf0,0x04,0x0a,0x00

# CHECK: v_cndmask_b32_e32 v5, -4.0, v2, vcc ; encoding: [0xf7,0x04,0x0a,0x00]
0xf7,0x04,0x0a,0x00

# CHECK: v_cndmask_b32_e32 v5, v1, v255, vcc ; encoding: [0x01,0xff,0x0b,0x00]
0x01,0xff,0x0b,0x00

# CHECK: v_add_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x02]
0x01,0x05,0x0a,0x02

# CHECK: v_add_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x03]
0x01,0x05,0xfe,0x03

# CHECK: v_add_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x02]
0xff,0x05,0x0a,0x02

# CHECK: v_add_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x02]
0x01,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x02]
0x65,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x02]
0x66,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x02]
0x67,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x02]
0x6a,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x02]
0x6b,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x02]
0x7c,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x02]
0x7e,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x02]
0x7f,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x02]
0x80,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x02]
0xc1,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x02]
0xf0,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x02]
0xf7,0x04,0x0a,0x02

# CHECK: v_add_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x02,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x02,0x56,0x34,0x12,0xaf

# CHECK: v_add_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x02,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x02,0x73,0x72,0x71,0x3f

# CHECK: v_add_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x02]
0x01,0xff,0x0b,0x02

# CHECK: v_sub_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x04]
0x01,0x05,0x0a,0x04

# CHECK: v_sub_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x05]
0x01,0x05,0xfe,0x05

# CHECK: v_sub_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x04]
0xff,0x05,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x04]
0x01,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x04]
0x65,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x04]
0x66,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x04]
0x67,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x04]
0x6a,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x04]
0x6b,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x04]
0x7c,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x04]
0x7e,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x04]
0x7f,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x04]
0x80,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x04]
0xc1,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x04]
0xf0,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x04]
0xf7,0x04,0x0a,0x04

# CHECK: v_sub_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x04,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x04,0x56,0x34,0x12,0xaf

# CHECK: v_sub_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x04,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x04,0x73,0x72,0x71,0x3f

# CHECK: v_sub_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x04]
0x01,0xff,0x0b,0x04

# CHECK: v_subrev_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x06]
0x01,0x05,0x0a,0x06

# CHECK: v_subrev_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x07]
0x01,0x05,0xfe,0x07

# CHECK: v_subrev_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x06]
0xff,0x05,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x06]
0x01,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x06]
0x65,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x06]
0x66,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x06]
0x67,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x06]
0x6a,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x06]
0x6b,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x06]
0x7c,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x06]
0x7e,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x06]
0x7f,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x06]
0x80,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x06]
0xc1,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x06]
0xf0,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x06]
0xf7,0x04,0x0a,0x06

# CHECK: v_subrev_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x06,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x06,0x56,0x34,0x12,0xaf

# CHECK: v_subrev_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x06,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x06,0x73,0x72,0x71,0x3f

# CHECK: v_subrev_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x06]
0x01,0xff,0x0b,0x06

# CHECK: v_mul_legacy_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x08]
0x01,0x05,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x09]
0x01,0x05,0xfe,0x09

# CHECK: v_mul_legacy_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x08]
0xff,0x05,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x08]
0x01,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x08]
0x65,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x08]
0x66,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x08]
0x67,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x08]
0x6a,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x08]
0x6b,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x08]
0x7c,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x08]
0x7e,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x08]
0x7f,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x08]
0x80,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x08]
0xc1,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x08]
0xf0,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x08]
0xf7,0x04,0x0a,0x08

# CHECK: v_mul_legacy_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x08,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x08,0x56,0x34,0x12,0xaf

# CHECK: v_mul_legacy_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x08,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x08,0x73,0x72,0x71,0x3f

# CHECK: v_mul_legacy_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x08]
0x01,0xff,0x0b,0x08

# CHECK: v_mul_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x0a]
0x01,0x05,0x0a,0x0a

# CHECK: v_mul_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x0b]
0x01,0x05,0xfe,0x0b

# CHECK: v_mul_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x0a]
0xff,0x05,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x0a]
0x01,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x0a]
0x65,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x0a]
0x66,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x0a]
0x67,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x0a]
0x6a,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x0a]
0x6b,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x0a]
0x7c,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x0a]
0x7e,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x0a]
0x7f,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x0a]
0x80,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x0a]
0xc1,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x0a]
0xf0,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x0a]
0xf7,0x04,0x0a,0x0a

# CHECK: v_mul_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x0a,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x0a,0x56,0x34,0x12,0xaf

# CHECK: v_mul_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x0a,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x0a,0x73,0x72,0x71,0x3f

# CHECK: v_mul_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x0a]
0x01,0xff,0x0b,0x0a

# CHECK: v_mul_i32_i24_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x0c]
0x01,0x05,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x0d]
0x01,0x05,0xfe,0x0d

# CHECK: v_mul_i32_i24_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x0c]
0xff,0x05,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x0c]
0x01,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x0c]
0x65,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x0c]
0x66,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x0c]
0x67,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x0c]
0x6a,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x0c]
0x6b,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x0c]
0x7c,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x0c]
0x7e,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x0c]
0x7f,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x0c]
0x80,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x0c]
0xc1,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x0c]
0xf0,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x0c]
0xf7,0x04,0x0a,0x0c

# CHECK: v_mul_i32_i24_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x0c,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x0c,0x56,0x34,0x12,0xaf

# CHECK: v_mul_i32_i24_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x0c,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x0c,0x73,0x72,0x71,0x3f

# CHECK: v_mul_i32_i24_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x0c]
0x01,0xff,0x0b,0x0c

# CHECK: v_mul_hi_i32_i24_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x0e]
0x01,0x05,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x0f]
0x01,0x05,0xfe,0x0f

# CHECK: v_mul_hi_i32_i24_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x0e]
0xff,0x05,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x0e]
0x01,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x0e]
0x65,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x0e]
0x66,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x0e]
0x67,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x0e]
0x6a,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x0e]
0x6b,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x0e]
0x7c,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x0e]
0x7e,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x0e]
0x7f,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x0e]
0x80,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x0e]
0xc1,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x0e]
0xf0,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x0e]
0xf7,0x04,0x0a,0x0e

# CHECK: v_mul_hi_i32_i24_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x0e,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x0e,0x56,0x34,0x12,0xaf

# CHECK: v_mul_hi_i32_i24_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x0e,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x0e,0x73,0x72,0x71,0x3f

# CHECK: v_mul_hi_i32_i24_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x0e]
0x01,0xff,0x0b,0x0e

# CHECK: v_mul_u32_u24_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x10]
0x01,0x05,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x11]
0x01,0x05,0xfe,0x11

# CHECK: v_mul_u32_u24_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x10]
0xff,0x05,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x10]
0x01,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x10]
0x65,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x10]
0x66,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x10]
0x67,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x10]
0x6a,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x10]
0x6b,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x10]
0x7c,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x10]
0x7e,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x10]
0x7f,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x10]
0x80,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x10]
0xc1,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x10]
0xf0,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x10]
0xf7,0x04,0x0a,0x10

# CHECK: v_mul_u32_u24_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x10,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x10,0x56,0x34,0x12,0xaf

# CHECK: v_mul_u32_u24_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x10,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x10,0x73,0x72,0x71,0x3f

# CHECK: v_mul_u32_u24_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x10]
0x01,0xff,0x0b,0x10

# CHECK: v_mul_hi_u32_u24_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x12]
0x01,0x05,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x13]
0x01,0x05,0xfe,0x13

# CHECK: v_mul_hi_u32_u24_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x12]
0xff,0x05,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x12]
0x01,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x12]
0x65,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x12]
0x66,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x12]
0x67,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x12]
0x6a,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x12]
0x6b,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x12]
0x7c,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x12]
0x7e,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x12]
0x7f,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x12]
0x80,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x12]
0xc1,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x12]
0xf0,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x12]
0xf7,0x04,0x0a,0x12

# CHECK: v_mul_hi_u32_u24_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x12,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x12,0x56,0x34,0x12,0xaf

# CHECK: v_mul_hi_u32_u24_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x12,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x12,0x73,0x72,0x71,0x3f

# CHECK: v_mul_hi_u32_u24_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x12]
0x01,0xff,0x0b,0x12

# CHECK: v_min_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x14]
0x01,0x05,0x0a,0x14

# CHECK: v_min_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x15]
0x01,0x05,0xfe,0x15

# CHECK: v_min_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x14]
0xff,0x05,0x0a,0x14

# CHECK: v_min_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x14]
0x01,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x14]
0x65,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x14]
0x66,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x14]
0x67,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x14]
0x6a,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x14]
0x6b,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x14]
0x7c,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x14]
0x7e,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x14]
0x7f,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x14]
0x80,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x14]
0xc1,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x14]
0xf0,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x14]
0xf7,0x04,0x0a,0x14

# CHECK: v_min_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x14,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x14,0x56,0x34,0x12,0xaf

# CHECK: v_min_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x14,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x14,0x73,0x72,0x71,0x3f

# CHECK: v_min_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x14]
0x01,0xff,0x0b,0x14

# CHECK: v_max_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x16]
0x01,0x05,0x0a,0x16

# CHECK: v_max_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x17]
0x01,0x05,0xfe,0x17

# CHECK: v_max_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x16]
0xff,0x05,0x0a,0x16

# CHECK: v_max_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x16]
0x01,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x16]
0x65,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x16]
0x66,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x16]
0x67,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x16]
0x6a,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x16]
0x6b,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x16]
0x7c,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x16]
0x7e,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x16]
0x7f,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x16]
0x80,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x16]
0xc1,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x16]
0xf0,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x16]
0xf7,0x04,0x0a,0x16

# CHECK: v_max_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x16,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x16,0x56,0x34,0x12,0xaf

# CHECK: v_max_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x16,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x16,0x73,0x72,0x71,0x3f

# CHECK: v_max_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x16]
0x01,0xff,0x0b,0x16

# CHECK: v_min_i32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x18]
0x01,0x05,0x0a,0x18

# CHECK: v_min_i32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x19]
0x01,0x05,0xfe,0x19

# CHECK: v_min_i32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x18]
0xff,0x05,0x0a,0x18

# CHECK: v_min_i32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x18]
0x01,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x18]
0x65,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x18]
0x66,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x18]
0x67,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x18]
0x6a,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x18]
0x6b,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x18]
0x7c,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x18]
0x7e,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x18]
0x7f,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x18]
0x80,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x18]
0xc1,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x18]
0xf0,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x18]
0xf7,0x04,0x0a,0x18

# CHECK: v_min_i32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x18,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x18,0x56,0x34,0x12,0xaf

# CHECK: v_min_i32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x18,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x18,0x73,0x72,0x71,0x3f

# CHECK: v_min_i32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x18]
0x01,0xff,0x0b,0x18

# CHECK: v_max_i32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x1a]
0x01,0x05,0x0a,0x1a

# CHECK: v_max_i32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x1b]
0x01,0x05,0xfe,0x1b

# CHECK: v_max_i32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x1a]
0xff,0x05,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x1a]
0x01,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x1a]
0x65,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x1a]
0x66,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x1a]
0x67,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x1a]
0x6a,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x1a]
0x6b,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x1a]
0x7c,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x1a]
0x7e,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x1a]
0x7f,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x1a]
0x80,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x1a]
0xc1,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x1a]
0xf0,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x1a]
0xf7,0x04,0x0a,0x1a

# CHECK: v_max_i32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x1a,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x1a,0x56,0x34,0x12,0xaf

# CHECK: v_max_i32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x1a,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x1a,0x73,0x72,0x71,0x3f

# CHECK: v_max_i32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x1a]
0x01,0xff,0x0b,0x1a

# CHECK: v_min_u32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x1c]
0x01,0x05,0x0a,0x1c

# CHECK: v_min_u32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x1d]
0x01,0x05,0xfe,0x1d

# CHECK: v_min_u32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x1c]
0xff,0x05,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x1c]
0x01,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x1c]
0x65,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x1c]
0x66,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x1c]
0x67,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x1c]
0x6a,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x1c]
0x6b,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x1c]
0x7c,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x1c]
0x7e,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x1c]
0x7f,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x1c]
0x80,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x1c]
0xc1,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x1c]
0xf0,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x1c]
0xf7,0x04,0x0a,0x1c

# CHECK: v_min_u32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x1c,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x1c,0x56,0x34,0x12,0xaf

# CHECK: v_min_u32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x1c,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x1c,0x73,0x72,0x71,0x3f

# CHECK: v_min_u32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x1c]
0x01,0xff,0x0b,0x1c

# CHECK: v_max_u32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x1e]
0x01,0x05,0x0a,0x1e

# CHECK: v_max_u32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x1f]
0x01,0x05,0xfe,0x1f

# CHECK: v_max_u32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x1e]
0xff,0x05,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x1e]
0x01,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x1e]
0x65,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x1e]
0x66,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x1e]
0x67,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x1e]
0x6a,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x1e]
0x6b,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x1e]
0x7c,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x1e]
0x7e,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x1e]
0x7f,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x1e]
0x80,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x1e]
0xc1,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x1e]
0xf0,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x1e]
0xf7,0x04,0x0a,0x1e

# CHECK: v_max_u32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x1e,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x1e,0x56,0x34,0x12,0xaf

# CHECK: v_max_u32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x1e,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x1e,0x73,0x72,0x71,0x3f

# CHECK: v_max_u32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x1e]
0x01,0xff,0x0b,0x1e

# CHECK: v_lshrrev_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x20]
0x01,0x05,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x21]
0x01,0x05,0xfe,0x21

# CHECK: v_lshrrev_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x20]
0xff,0x05,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x20]
0x01,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x20]
0x65,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x20]
0x66,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x20]
0x67,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x20]
0x6a,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x20]
0x6b,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x20]
0x7c,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x20]
0x7e,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x20]
0x7f,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x20]
0x80,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x20]
0xc1,0x04,0x0a,0x20

# CHECK: v_lshrrev_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x20]
0xf0,0x04,0x0a,0x20
|
|
|
|
# CHECK: v_lshrrev_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x20]
|
|
0xf7,0x04,0x0a,0x20
|
|
|
|
# CHECK: v_lshrrev_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x20,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x20,0x56,0x34,0x12,0xaf
|
|
|
|
# CHECK: v_lshrrev_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x20,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x20,0x73,0x72,0x71,0x3f
|
|
|
|
# CHECK: v_lshrrev_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x20]
|
|
0x01,0xff,0x0b,0x20
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x22]
|
|
0x01,0x05,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x23]
|
|
0x01,0x05,0xfe,0x23
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x22]
|
|
0xff,0x05,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x22]
|
|
0x01,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x22]
|
|
0x65,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x22]
|
|
0x66,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x22]
|
|
0x67,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x22]
|
|
0x6a,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x22]
|
|
0x6b,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x22]
|
|
0x7c,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x22]
|
|
0x7e,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x22]
|
|
0x7f,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x22]
|
|
0x80,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x22]
|
|
0xc1,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x22]
|
|
0xf0,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x22]
|
|
0xf7,0x04,0x0a,0x22
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x22,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x22,0x56,0x34,0x12,0xaf
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x22,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x22,0x73,0x72,0x71,0x3f
|
|
|
|
# CHECK: v_ashrrev_i32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x22]
|
|
0x01,0xff,0x0b,0x22
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x24]
|
|
0x01,0x05,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x25]
|
|
0x01,0x05,0xfe,0x25
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x24]
|
|
0xff,0x05,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x24]
|
|
0x01,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x24]
|
|
0x65,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x24]
|
|
0x66,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x24]
|
|
0x67,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x24]
|
|
0x6a,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x24]
|
|
0x6b,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x24]
|
|
0x7c,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x24]
|
|
0x7e,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x24]
|
|
0x7f,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x24]
|
|
0x80,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x24]
|
|
0xc1,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x24]
|
|
0xf0,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x24]
|
|
0xf7,0x04,0x0a,0x24
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x24,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x24,0x56,0x34,0x12,0xaf
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x24,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x24,0x73,0x72,0x71,0x3f
|
|
|
|
# CHECK: v_lshlrev_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x24]
|
|
0x01,0xff,0x0b,0x24
|
|
|
|
# CHECK: v_and_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x26]
|
|
0x01,0x05,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x27]
|
|
0x01,0x05,0xfe,0x27
|
|
|
|
# CHECK: v_and_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x26]
|
|
0xff,0x05,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x26]
|
|
0x01,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x26]
|
|
0x65,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x26]
|
|
0x66,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x26]
|
|
0x67,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x26]
|
|
0x6a,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x26]
|
|
0x6b,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x26]
|
|
0x7c,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x26]
|
|
0x7e,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x26]
|
|
0x7f,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x26]
|
|
0x80,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x26]
|
|
0xc1,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x26]
|
|
0xf0,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x26]
|
|
0xf7,0x04,0x0a,0x26
|
|
|
|
# CHECK: v_and_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x26,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x26,0x56,0x34,0x12,0xaf
|
|
|
|
# CHECK: v_and_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x26,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x26,0x73,0x72,0x71,0x3f
|
|
|
|
# CHECK: v_and_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x26]
|
|
0x01,0xff,0x0b,0x26
|
|
|
|
# CHECK: v_or_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x28]
|
|
0x01,0x05,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x29]
|
|
0x01,0x05,0xfe,0x29
|
|
|
|
# CHECK: v_or_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x28]
|
|
0xff,0x05,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x28]
|
|
0x01,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x28]
|
|
0x65,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x28]
|
|
0x66,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x28]
|
|
0x67,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x28]
|
|
0x6a,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x28]
|
|
0x6b,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x28]
|
|
0x7c,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x28]
|
|
0x7e,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x28]
|
|
0x7f,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x28]
|
|
0x80,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x28]
|
|
0xc1,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x28]
|
|
0xf0,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x28]
|
|
0xf7,0x04,0x0a,0x28
|
|
|
|
# CHECK: v_or_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x28,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x28,0x56,0x34,0x12,0xaf
|
|
|
|
# CHECK: v_or_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x28,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x28,0x73,0x72,0x71,0x3f
|
|
|
|
# CHECK: v_or_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x28]
|
|
0x01,0xff,0x0b,0x28
|
|
|
|
# CHECK: v_xor_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x2a]
|
|
0x01,0x05,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x2b]
|
|
0x01,0x05,0xfe,0x2b
|
|
|
|
# CHECK: v_xor_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x2a]
|
|
0xff,0x05,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x2a]
|
|
0x01,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x2a]
|
|
0x65,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x2a]
|
|
0x66,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x2a]
|
|
0x67,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x2a]
|
|
0x6a,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x2a]
|
|
0x6b,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x2a]
|
|
0x7c,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x2a]
|
|
0x7e,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x2a]
|
|
0x7f,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x2a]
|
|
0x80,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x2a]
|
|
0xc1,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x2a]
|
|
0xf0,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x2a]
|
|
0xf7,0x04,0x0a,0x2a
|
|
|
|
# CHECK: v_xor_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x2a,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x2a,0x56,0x34,0x12,0xaf
|
|
|
|
# CHECK: v_xor_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x2a,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x2a,0x73,0x72,0x71,0x3f
|
|
|
|
# CHECK: v_xor_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x2a]
|
|
0x01,0xff,0x0b,0x2a
|
|
|
|
# CHECK: v_mac_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x2c]
|
|
0x01,0x05,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x2d]
|
|
0x01,0x05,0xfe,0x2d
|
|
|
|
# CHECK: v_mac_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x2c]
|
|
0xff,0x05,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x2c]
|
|
0x01,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x2c]
|
|
0x65,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x2c]
|
|
0x66,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x2c]
|
|
0x67,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x2c]
|
|
0x6a,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x2c]
|
|
0x6b,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x2c]
|
|
0x7c,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x2c]
|
|
0x7e,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x2c]
|
|
0x7f,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x2c]
|
|
0x80,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x2c]
|
|
0xc1,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x2c]
|
|
0xf0,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x2c]
|
|
0xf7,0x04,0x0a,0x2c
|
|
|
|
# CHECK: v_mac_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x2c,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x2c,0x56,0x34,0x12,0xaf
|
|
|
|
# CHECK: v_mac_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x2c,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x2c,0x73,0x72,0x71,0x3f
|
|
|
|
# CHECK: v_mac_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x2c]
|
|
0x01,0xff,0x0b,0x2c
|
|
|
|
# CHECK: v_madmk_f32 v5, v1, 0x11213141, v3 ; encoding: [0x01,0x07,0x0a,0x2e,0x41,0x31,0x21,0x11]
0x01,0x07,0x0a,0x2e,0x41,0x31,0x21,0x11

# CHECK: v_madmk_f32 v255, v1, 0x11213141, v3 ; encoding: [0x01,0x07,0xfe,0x2f,0x41,0x31,0x21,0x11]
0x01,0x07,0xfe,0x2f,0x41,0x31,0x21,0x11

# CHECK: v_madmk_f32 v5, v255, 0x11213141, v3 ; encoding: [0xff,0x07,0x0a,0x2e,0x41,0x31,0x21,0x11]
0xff,0x07,0x0a,0x2e,0x41,0x31,0x21,0x11

# CHECK: v_madmk_f32 v5, 0, 0x11213141, v3 ; encoding: [0x80,0x06,0x0a,0x2e,0x41,0x31,0x21,0x11]
0x80,0x06,0x0a,0x2e,0x41,0x31,0x21,0x11

# CHECK: v_madmk_f32 v5, -1, 0x11213141, v3 ; encoding: [0xc1,0x06,0x0a,0x2e,0x41,0x31,0x21,0x11]
0xc1,0x06,0x0a,0x2e,0x41,0x31,0x21,0x11

# CHECK: v_madmk_f32 v5, 0.5, 0x11213141, v3 ; encoding: [0xf0,0x06,0x0a,0x2e,0x41,0x31,0x21,0x11]
0xf0,0x06,0x0a,0x2e,0x41,0x31,0x21,0x11

# CHECK: v_madmk_f32 v5, -4.0, 0x11213141, v3 ; encoding: [0xf7,0x06,0x0a,0x2e,0x41,0x31,0x21,0x11]
0xf7,0x06,0x0a,0x2e,0x41,0x31,0x21,0x11

# CHECK: v_madmk_f32 v5, v1, 0xa1b1c1d1, v3 ; encoding: [0x01,0x07,0x0a,0x2e,0xd1,0xc1,0xb1,0xa1]
0x01,0x07,0x0a,0x2e,0xd1,0xc1,0xb1,0xa1

# CHECK: v_madmk_f32 v5, v1, 0x11213141, v255 ; encoding: [0x01,0xff,0x0b,0x2e,0x41,0x31,0x21,0x11]
0x01,0xff,0x0b,0x2e,0x41,0x31,0x21,0x11

# CHECK: v_madmk_f32 v0, 0x11213141, 0x11213141, v0 ; encoding: [0xff,0x00,0x00,0x2e,0x41,0x31,0x21,0x11]
0xff,0x00,0x00,0x2e,0x41,0x31,0x21,0x11
# CHECK: v_madak_f32 v5, v1, v2, 0x11213141 ; encoding: [0x01,0x05,0x0a,0x30,0x41,0x31,0x21,0x11]
|
|
0x01,0x05,0x0a,0x30,0x41,0x31,0x21,0x11
|
|
|
|
# CHECK: v_madak_f32 v255, v1, v2, 0x11213141 ; encoding: [0x01,0x05,0xfe,0x31,0x41,0x31,0x21,0x11]
|
|
0x01,0x05,0xfe,0x31,0x41,0x31,0x21,0x11
|
|
|
|
# CHECK: v_madak_f32 v5, v255, v2, 0x11213141 ; encoding: [0xff,0x05,0x0a,0x30,0x41,0x31,0x21,0x11]
|
|
0xff,0x05,0x0a,0x30,0x41,0x31,0x21,0x11
|
|
|
|
# CHECK: v_madak_f32 v5, 0, v2, 0x11213141 ; encoding: [0x80,0x04,0x0a,0x30,0x41,0x31,0x21,0x11]
|
|
0x80,0x04,0x0a,0x30,0x41,0x31,0x21,0x11
|
|
|
|
# CHECK: v_madak_f32 v5, -1, v2, 0x11213141 ; encoding: [0xc1,0x04,0x0a,0x30,0x41,0x31,0x21,0x11]
|
|
0xc1,0x04,0x0a,0x30,0x41,0x31,0x21,0x11
|
|
|
|
# CHECK: v_madak_f32 v5, 0.5, v2, 0x11213141 ; encoding: [0xf0,0x04,0x0a,0x30,0x41,0x31,0x21,0x11]
|
|
0xf0,0x04,0x0a,0x30,0x41,0x31,0x21,0x11
|
|
|
|
# CHECK: v_madak_f32 v5, -4.0, v2, 0x11213141 ; encoding: [0xf7,0x04,0x0a,0x30,0x41,0x31,0x21,0x11]
|
|
0xf7,0x04,0x0a,0x30,0x41,0x31,0x21,0x11
|
|
|
|
# CHECK: v_madak_f32 v5, v1, v255, 0x11213141 ; encoding: [0x01,0xff,0x0b,0x30,0x41,0x31,0x21,0x11]
|
|
0x01,0xff,0x0b,0x30,0x41,0x31,0x21,0x11
|
|
|
|
# CHECK: v_madak_f32 v5, v1, v2, 0xa1b1c1d1 ; encoding: [0x01,0x05,0x0a,0x30,0xd1,0xc1,0xb1,0xa1]
|
|
0x01,0x05,0x0a,0x30,0xd1,0xc1,0xb1,0xa1
|
|
|
|
# CHECK: v_madak_f32 v0, 0x11213141, v0, 0x11213141 ; encoding: [0xff,0x00,0x00,0x30,0x41,0x31,0x21,0x11]
|
|
0xff,0x00,0x00,0x30,0x41,0x31,0x21,0x11
|
|
|
|
# CHECK: v_add_co_u32_e32 v5, vcc, v1, v2 ; encoding: [0x01,0x05,0x0a,0x32]
0x01,0x05,0x0a,0x32

# CHECK: v_add_co_u32_e32 v255, vcc, v1, v2 ; encoding: [0x01,0x05,0xfe,0x33]
0x01,0x05,0xfe,0x33

# CHECK: v_add_co_u32_e32 v5, vcc, v255, v2 ; encoding: [0xff,0x05,0x0a,0x32]
0xff,0x05,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, s1, v2 ; encoding: [0x01,0x04,0x0a,0x32]
0x01,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, s101, v2 ; encoding: [0x65,0x04,0x0a,0x32]
0x65,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x32]
0x66,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x32]
0x67,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x32]
0x6a,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x32]
0x6b,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x32]
0x7c,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x32]
0x7e,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x32]
0x7f,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, 0, v2 ; encoding: [0x80,0x04,0x0a,0x32]
0x80,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x32]
0xc1,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x32]
0xf0,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x32]
0xf7,0x04,0x0a,0x32

# CHECK: v_add_co_u32_e32 v5, vcc, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x32,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x32,0x56,0x34,0x12,0xaf

# CHECK: v_add_co_u32_e32 v5, vcc, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x32,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x32,0x73,0x72,0x71,0x3f

# CHECK: v_add_co_u32_e32 v5, vcc, v1, v255 ; encoding: [0x01,0xff,0x0b,0x32]
0x01,0xff,0x0b,0x32
# CHECK: v_sub_co_u32_e32 v5, vcc, v1, v2 ; encoding: [0x01,0x05,0x0a,0x34]
|
|
0x01,0x05,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v255, vcc, v1, v2 ; encoding: [0x01,0x05,0xfe,0x35]
|
|
0x01,0x05,0xfe,0x35
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, v255, v2 ; encoding: [0xff,0x05,0x0a,0x34]
|
|
0xff,0x05,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, s1, v2 ; encoding: [0x01,0x04,0x0a,0x34]
|
|
0x01,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, s101, v2 ; encoding: [0x65,0x04,0x0a,0x34]
|
|
0x65,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x34]
|
|
0x66,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x34]
|
|
0x67,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x34]
|
|
0x6a,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x34]
|
|
0x6b,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x34]
|
|
0x7c,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x34]
|
|
0x7e,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x34]
|
|
0x7f,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, 0, v2 ; encoding: [0x80,0x04,0x0a,0x34]
|
|
0x80,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x34]
|
|
0xc1,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x34]
|
|
0xf0,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x34]
|
|
0xf7,0x04,0x0a,0x34
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x34,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x34,0x56,0x34,0x12,0xaf
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x34,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x34,0x73,0x72,0x71,0x3f
|
|
|
|
# CHECK: v_sub_co_u32_e32 v5, vcc, v1, v255 ; encoding: [0x01,0xff,0x0b,0x34]
|
|
0x01,0xff,0x0b,0x34
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, v1, v2 ; encoding: [0x01,0x05,0x0a,0x36]
|
|
0x01,0x05,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v255, vcc, v1, v2 ; encoding: [0x01,0x05,0xfe,0x37]
|
|
0x01,0x05,0xfe,0x37
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, v255, v2 ; encoding: [0xff,0x05,0x0a,0x36]
|
|
0xff,0x05,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, s1, v2 ; encoding: [0x01,0x04,0x0a,0x36]
|
|
0x01,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, s101, v2 ; encoding: [0x65,0x04,0x0a,0x36]
|
|
0x65,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x36]
|
|
0x66,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x36]
|
|
0x67,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x36]
|
|
0x6a,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x36]
|
|
0x6b,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x36]
|
|
0x7c,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x36]
|
|
0x7e,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x36]
|
|
0x7f,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, 0, v2 ; encoding: [0x80,0x04,0x0a,0x36]
|
|
0x80,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x36]
|
|
0xc1,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x36]
|
|
0xf0,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x36]
|
|
0xf7,0x04,0x0a,0x36
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x36,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x36,0x56,0x34,0x12,0xaf
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x36,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x36,0x73,0x72,0x71,0x3f
|
|
|
|
# CHECK: v_subrev_co_u32_e32 v5, vcc, v1, v255 ; encoding: [0x01,0xff,0x0b,0x36]
|
|
0x01,0xff,0x0b,0x36
|
|
|
|
# CHECK: v_addc_co_u32_e32 v5, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0x0a,0x38]
|
|
0x01,0x05,0x0a,0x38
|
|
|
|
# CHECK: v_addc_co_u32_e32 v255, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0xfe,0x39]
|
|
0x01,0x05,0xfe,0x39
|
|
|
|
# CHECK: v_addc_co_u32_e32 v5, vcc, v255, v2, vcc ; encoding: [0xff,0x05,0x0a,0x38]
|
|
0xff,0x05,0x0a,0x38
|
|
|
|
# CHECK: v_addc_co_u32_e32 v5, vcc, 0, v2, vcc ; encoding: [0x80,0x04,0x0a,0x38]
|
|
0x80,0x04,0x0a,0x38
|
|
|
|
# CHECK: v_addc_co_u32_e32 v5, vcc, -1, v2, vcc ; encoding: [0xc1,0x04,0x0a,0x38]
|
|
0xc1,0x04,0x0a,0x38
|
|
|
|
# CHECK: v_addc_co_u32_e32 v5, vcc, 0.5, v2, vcc ; encoding: [0xf0,0x04,0x0a,0x38]
|
|
0xf0,0x04,0x0a,0x38
|
|
|
|
# CHECK: v_addc_co_u32_e32 v5, vcc, -4.0, v2, vcc ; encoding: [0xf7,0x04,0x0a,0x38]
|
|
0xf7,0x04,0x0a,0x38
|
|
|
|
# CHECK: v_addc_co_u32_e32 v5, vcc, v1, v255, vcc ; encoding: [0x01,0xff,0x0b,0x38]
|
|
0x01,0xff,0x0b,0x38
|
|
|
|
# CHECK: v_subb_co_u32_e32 v5, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0x0a,0x3a]
|
|
0x01,0x05,0x0a,0x3a
|
|
|
|
# CHECK: v_subb_co_u32_e32 v255, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0xfe,0x3b]
|
|
0x01,0x05,0xfe,0x3b
|
|
|
|
# CHECK: v_subb_co_u32_e32 v5, vcc, v255, v2, vcc ; encoding: [0xff,0x05,0x0a,0x3a]
|
|
0xff,0x05,0x0a,0x3a
|
|
|
|
# CHECK: v_subb_co_u32_e32 v5, vcc, 0, v2, vcc ; encoding: [0x80,0x04,0x0a,0x3a]
|
|
0x80,0x04,0x0a,0x3a
|
|
|
|
# CHECK: v_subb_co_u32_e32 v5, vcc, -1, v2, vcc ; encoding: [0xc1,0x04,0x0a,0x3a]
|
|
0xc1,0x04,0x0a,0x3a
|
|
|
|
# CHECK: v_subb_co_u32_e32 v5, vcc, 0.5, v2, vcc ; encoding: [0xf0,0x04,0x0a,0x3a]
|
|
0xf0,0x04,0x0a,0x3a
|
|
|
|
# CHECK: v_subb_co_u32_e32 v5, vcc, -4.0, v2, vcc ; encoding: [0xf7,0x04,0x0a,0x3a]
|
|
0xf7,0x04,0x0a,0x3a
|
|
|
|
# CHECK: v_subb_co_u32_e32 v5, vcc, v1, v255, vcc ; encoding: [0x01,0xff,0x0b,0x3a]
|
|
0x01,0xff,0x0b,0x3a
|
|
|
|
# CHECK: v_subbrev_co_u32_e32 v5, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0x0a,0x3c]
|
|
0x01,0x05,0x0a,0x3c
|
|
|
|
# CHECK: v_subbrev_co_u32_e32 v255, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0xfe,0x3d]
|
|
0x01,0x05,0xfe,0x3d
|
|
|
|
# CHECK: v_subbrev_co_u32_e32 v5, vcc, v255, v2, vcc ; encoding: [0xff,0x05,0x0a,0x3c]
|
|
0xff,0x05,0x0a,0x3c
|
|
|
|
# CHECK: v_subbrev_co_u32_e32 v5, vcc, 0, v2, vcc ; encoding: [0x80,0x04,0x0a,0x3c]
|
|
0x80,0x04,0x0a,0x3c
|
|
|
|
# CHECK: v_subbrev_co_u32_e32 v5, vcc, -1, v2, vcc ; encoding: [0xc1,0x04,0x0a,0x3c]
|
|
0xc1,0x04,0x0a,0x3c
|
|
|
|
# CHECK: v_subbrev_co_u32_e32 v5, vcc, 0.5, v2, vcc ; encoding: [0xf0,0x04,0x0a,0x3c]
|
|
0xf0,0x04,0x0a,0x3c
|
|
|
|
# CHECK: v_subbrev_co_u32_e32 v5, vcc, -4.0, v2, vcc ; encoding: [0xf7,0x04,0x0a,0x3c]
|
|
0xf7,0x04,0x0a,0x3c
|
|
|
|
# CHECK: v_subbrev_co_u32_e32 v5, vcc, v1, v255, vcc ; encoding: [0x01,0xff,0x0b,0x3c]
|
|
0x01,0xff,0x0b,0x3c
|
|
|
|
# CHECK: v_add_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x3e]
|
|
0x01,0x05,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x3f]
|
|
0x01,0x05,0xfe,0x3f
|
|
|
|
# CHECK: v_add_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x3e]
|
|
0xff,0x05,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x3e]
|
|
0x01,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x3e]
|
|
0x65,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x3e]
|
|
0x66,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x3e]
|
|
0x67,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x3e]
|
|
0x6a,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x3e]
|
|
0x6b,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x3e]
|
|
0x7c,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x3e]
|
|
0x7e,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x3e]
|
|
0x7f,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x3e]
|
|
0x80,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x3e]
|
|
0xc1,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x3e]
|
|
0xf0,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x3e]
|
|
0xf7,0x04,0x0a,0x3e
|
|
|
|
# CHECK: v_add_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x3e,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x3e,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_add_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x3e,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x3e,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_add_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x3e]
|
|
0x01,0xff,0x0b,0x3e
|
|
|
|
# CHECK: v_sub_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x40]
|
|
0x01,0x05,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x41]
|
|
0x01,0x05,0xfe,0x41
|
|
|
|
# CHECK: v_sub_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x40]
|
|
0xff,0x05,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x40]
|
|
0x01,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x40]
|
|
0x65,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x40]
|
|
0x66,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x40]
|
|
0x67,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x40]
|
|
0x6a,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x40]
|
|
0x6b,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x40]
|
|
0x7c,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x40]
|
|
0x7e,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x40]
|
|
0x7f,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x40]
|
|
0x80,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x40]
|
|
0xc1,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x40]
|
|
0xf0,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x40]
|
|
0xf7,0x04,0x0a,0x40
|
|
|
|
# CHECK: v_sub_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x40,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x40,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_sub_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x40,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x40,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_sub_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x40]
|
|
0x01,0xff,0x0b,0x40
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x42]
|
|
0x01,0x05,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x43]
|
|
0x01,0x05,0xfe,0x43
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x42]
|
|
0xff,0x05,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x42]
|
|
0x01,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x42]
|
|
0x65,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x42]
|
|
0x66,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x42]
|
|
0x67,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x42]
|
|
0x6a,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x42]
|
|
0x6b,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x42]
|
|
0x7c,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x42]
|
|
0x7e,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x42]
|
|
0x7f,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x42]
|
|
0x80,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x42]
|
|
0xc1,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x42]
|
|
0xf0,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x42]
|
|
0xf7,0x04,0x0a,0x42
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x42,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x42,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x42,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x42,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_subrev_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x42]
|
|
0x01,0xff,0x0b,0x42
|
|
|
|
# CHECK: v_mul_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x44]
|
|
0x01,0x05,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x45]
|
|
0x01,0x05,0xfe,0x45
|
|
|
|
# CHECK: v_mul_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x44]
|
|
0xff,0x05,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x44]
|
|
0x01,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x44]
|
|
0x65,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x44]
|
|
0x66,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x44]
|
|
0x67,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x44]
|
|
0x6a,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x44]
|
|
0x6b,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x44]
|
|
0x7c,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x44]
|
|
0x7e,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x44]
|
|
0x7f,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x44]
|
|
0x80,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x44]
|
|
0xc1,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x44]
|
|
0xf0,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x44]
|
|
0xf7,0x04,0x0a,0x44
|
|
|
|
# CHECK: v_mul_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x44,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x44,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_mul_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x44,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x44,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_mul_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x44]
|
|
0x01,0xff,0x0b,0x44
|
|
|
|
# CHECK: v_mac_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x46]
|
|
0x01,0x05,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x47]
|
|
0x01,0x05,0xfe,0x47
|
|
|
|
# CHECK: v_mac_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x46]
|
|
0xff,0x05,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x46]
|
|
0x01,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x46]
|
|
0x65,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x46]
|
|
0x66,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x46]
|
|
0x67,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x46]
|
|
0x6a,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x46]
|
|
0x6b,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x46]
|
|
0x7c,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x46]
|
|
0x7e,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x46]
|
|
0x7f,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x46]
|
|
0x80,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x46]
|
|
0xc1,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x46]
|
|
0xf0,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x46]
|
|
0xf7,0x04,0x0a,0x46
|
|
|
|
# CHECK: v_mac_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x46,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x46,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_mac_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x46,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x46,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_mac_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x46]
|
|
0x01,0xff,0x0b,0x46
|
|
|
|
# CHECK: v_madmk_f16 v5, v1, 0x1121, v3 ; encoding: [0x01,0x07,0x0a,0x48,0x21,0x11,0x00,0x00]
|
|
0x01,0x07,0x0a,0x48,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madmk_f16 v255, v1, 0x1121, v3 ; encoding: [0x01,0x07,0xfe,0x49,0x21,0x11,0x00,0x00]
|
|
0x01,0x07,0xfe,0x49,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madmk_f16 v5, v255, 0x1121, v3 ; encoding: [0xff,0x07,0x0a,0x48,0x21,0x11,0x00,0x00]
|
|
0xff,0x07,0x0a,0x48,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madmk_f16 v5, 0, 0x1121, v3 ; encoding: [0x80,0x06,0x0a,0x48,0x21,0x11,0x00,0x00]
|
|
0x80,0x06,0x0a,0x48,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madmk_f16 v5, -1, 0x1121, v3 ; encoding: [0xc1,0x06,0x0a,0x48,0x21,0x11,0x00,0x00]
|
|
0xc1,0x06,0x0a,0x48,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madmk_f16 v5, 0.5, 0x1121, v3 ; encoding: [0xf0,0x06,0x0a,0x48,0x21,0x11,0x00,0x00]
|
|
0xf0,0x06,0x0a,0x48,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madmk_f16 v5, -4.0, 0x1121, v3 ; encoding: [0xf7,0x06,0x0a,0x48,0x21,0x11,0x00,0x00]
|
|
0xf7,0x06,0x0a,0x48,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madmk_f16 v5, v1, 0xa1b1, v3 ; encoding: [0x01,0x07,0x0a,0x48,0xb1,0xa1,0x00,0x00]
|
|
0x01,0x07,0x0a,0x48,0xb1,0xa1,0x00,0x00
|
|
|
|
# CHECK: v_madmk_f16 v5, v1, 0x1121, v255 ; encoding: [0x01,0xff,0x0b,0x48,0x21,0x11,0x00,0x00]
|
|
0x01,0xff,0x0b,0x48,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madmk_f16 v5, 0x1121, 0x1121, v255 ; encoding: [0xff,0xfe,0x0b,0x48,0x21,0x11,0x00,0x00]
|
|
0xff,0xfe,0x0b,0x48,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madak_f16 v5, v1, v2, 0x1121 ; encoding: [0x01,0x05,0x0a,0x4a,0x21,0x11,0x00,0x00]
|
|
0x01,0x05,0x0a,0x4a,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madak_f16 v255, v1, v2, 0x1121 ; encoding: [0x01,0x05,0xfe,0x4b,0x21,0x11,0x00,0x00]
|
|
0x01,0x05,0xfe,0x4b,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madak_f16 v5, v255, v2, 0x1121 ; encoding: [0xff,0x05,0x0a,0x4a,0x21,0x11,0x00,0x00]
|
|
0xff,0x05,0x0a,0x4a,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madak_f16 v5, 0, v2, 0x1121 ; encoding: [0x80,0x04,0x0a,0x4a,0x21,0x11,0x00,0x00]
|
|
0x80,0x04,0x0a,0x4a,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madak_f16 v5, -1, v2, 0x1121 ; encoding: [0xc1,0x04,0x0a,0x4a,0x21,0x11,0x00,0x00]
|
|
0xc1,0x04,0x0a,0x4a,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madak_f16 v5, 0.5, v2, 0x1121 ; encoding: [0xf0,0x04,0x0a,0x4a,0x21,0x11,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x4a,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madak_f16 v5, -4.0, v2, 0x1121 ; encoding: [0xf7,0x04,0x0a,0x4a,0x21,0x11,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x4a,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madak_f16 v5, v1, v255, 0x1121 ; encoding: [0x01,0xff,0x0b,0x4a,0x21,0x11,0x00,0x00]
|
|
0x01,0xff,0x0b,0x4a,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_madak_f16 v5, v1, v2, 0xa1b1 ; encoding: [0x01,0x05,0x0a,0x4a,0xb1,0xa1,0x00,0x00]
|
|
0x01,0x05,0x0a,0x4a,0xb1,0xa1,0x00,0x00
|
|
|
|
# CHECK: v_madak_f16 v5, 0x1121, v2, 0x1121 ; encoding: [0xff,0x04,0x0a,0x4a,0x21,0x11,0x00,0x00]
|
|
0xff,0x04,0x0a,0x4a,0x21,0x11,0x00,0x00
|
|
|
|
# CHECK: v_add_u16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x4c]
|
|
0x01,0x05,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x4d]
|
|
0x01,0x05,0xfe,0x4d
|
|
|
|
# CHECK: v_add_u16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x4c]
|
|
0xff,0x05,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x4c]
|
|
0x01,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x4c]
|
|
0x65,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x4c]
|
|
0x66,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x4c]
|
|
0x67,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x4c]
|
|
0x6a,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x4c]
|
|
0x6b,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x4c]
|
|
0x7c,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x4c]
|
|
0x7e,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x4c]
|
|
0x7f,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x4c]
|
|
0x80,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x4c]
|
|
0xc1,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x4c,0x00,0x38,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x4c,0x00,0xc4,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x4c
|
|
|
|
# CHECK: v_add_u16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x4c,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x4c,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_add_u16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x4c,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x4c,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_add_u16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x4c]
|
|
0x01,0xff,0x0b,0x4c
|
|
|
|
# CHECK: v_sub_u16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x4e]
|
|
0x01,0x05,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x4f]
|
|
0x01,0x05,0xfe,0x4f
|
|
|
|
# CHECK: v_sub_u16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x4e]
|
|
0xff,0x05,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x4e]
|
|
0x01,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x4e]
|
|
0x65,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x4e]
|
|
0x66,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x4e]
|
|
0x67,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x4e]
|
|
0x6a,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x4e]
|
|
0x6b,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x4e]
|
|
0x7c,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x4e]
|
|
0x7e,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x4e]
|
|
0x7f,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x4e]
|
|
0x80,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x4e]
|
|
0xc1,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x4e,0x00,0x38,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x4e,0x00,0xc4,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x4e
|
|
|
|
# CHECK: v_sub_u16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x4e,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x4e,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_sub_u16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x4e,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x4e,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_sub_u16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x4e]
|
|
0x01,0xff,0x0b,0x4e
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x50]
|
|
0x01,0x05,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x51]
|
|
0x01,0x05,0xfe,0x51
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x50]
|
|
0xff,0x05,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x50]
|
|
0x01,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x50]
|
|
0x65,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x50]
|
|
0x66,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x50]
|
|
0x67,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x50]
|
|
0x6a,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x50]
|
|
0x6b,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x50]
|
|
0x7c,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x50]
|
|
0x7e,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x50]
|
|
0x7f,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x50]
|
|
0x80,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x50]
|
|
0xc1,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x50,0x00,0x38,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x50,0x00,0xc4,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x50
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x50,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x50,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x50,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x50,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_subrev_u16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x50]
|
|
0x01,0xff,0x0b,0x50
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x52]
|
|
0x01,0x05,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x53]
|
|
0x01,0x05,0xfe,0x53
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x52]
|
|
0xff,0x05,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x52]
|
|
0x01,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x52]
|
|
0x65,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x52]
|
|
0x66,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x52]
|
|
0x67,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x52]
|
|
0x6a,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x52]
|
|
0x6b,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x52]
|
|
0x7c,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x52]
|
|
0x7e,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x52]
|
|
0x7f,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x52]
|
|
0x80,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x52]
|
|
0xc1,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x52,0x00,0x38,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x52,0x00,0xc4,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x52
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x52,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x52,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x52,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x52,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_mul_lo_u16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x52]
|
|
0x01,0xff,0x0b,0x52
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x54]
|
|
0x01,0x05,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x55]
|
|
0x01,0x05,0xfe,0x55
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x54]
|
|
0xff,0x05,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x54]
|
|
0x01,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x54]
|
|
0x65,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x54]
|
|
0x66,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x54]
|
|
0x67,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x54]
|
|
0x6a,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x54]
|
|
0x6b,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x54]
|
|
0x7c,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x54]
|
|
0x7e,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x54]
|
|
0x7f,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x54]
|
|
0x80,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x54]
|
|
0xc1,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x54,0x00,0x38,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x54,0x00,0xc4,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x54
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x54,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x54,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x54,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x54,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_lshlrev_b16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x54]
|
|
0x01,0xff,0x0b,0x54
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x56]
|
|
0x01,0x05,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x57]
|
|
0x01,0x05,0xfe,0x57
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x56]
|
|
0xff,0x05,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x56]
|
|
0x01,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x56]
|
|
0x65,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x56]
|
|
0x66,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x56]
|
|
0x67,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x56]
|
|
0x6a,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x56]
|
|
0x6b,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x56]
|
|
0x7c,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x56]
|
|
0x7e,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x56]
|
|
0x7f,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x56]
|
|
0x80,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x56]
|
|
0xc1,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x56,0x00,0x38,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x56,0x00,0xc4,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x56
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x56,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x56,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x56,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x56,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_lshrrev_b16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x56]
|
|
0x01,0xff,0x0b,0x56
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x58]
|
|
0x01,0x05,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x59]
|
|
0x01,0x05,0xfe,0x59
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x58]
|
|
0xff,0x05,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x58]
|
|
0x01,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x58]
|
|
0x65,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x58]
|
|
0x66,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x58]
|
|
0x67,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x58]
|
|
0x6a,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x58]
|
|
0x6b,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x58]
|
|
0x7c,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x58]
|
|
0x7e,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x58]
|
|
0x7f,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x58]
|
|
0x80,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x58]
|
|
0xc1,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x58,0x00,0x38,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x58,0x00,0xc4,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x58
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x58,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x58,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x58,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x58,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_ashrrev_i16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x58]
|
|
0x01,0xff,0x0b,0x58
|
|
|
|
# CHECK: v_max_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x5a]
|
|
0x01,0x05,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x5b]
|
|
0x01,0x05,0xfe,0x5b
|
|
|
|
# CHECK: v_max_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x5a]
|
|
0xff,0x05,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x5a]
|
|
0x01,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x5a]
|
|
0x65,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x5a]
|
|
0x66,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x5a]
|
|
0x67,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x5a]
|
|
0x6a,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x5a]
|
|
0x6b,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x5a]
|
|
0x7c,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x5a]
|
|
0x7e,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x5a]
|
|
0x7f,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x5a]
|
|
0x80,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x5a]
|
|
0xc1,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x5a]
|
|
0xf0,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x5a]
|
|
0xf7,0x04,0x0a,0x5a
|
|
|
|
# CHECK: v_max_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x5a,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x5a,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_max_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x5a,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x5a,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_max_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x5a]
|
|
0x01,0xff,0x0b,0x5a
|
|
|
|
# CHECK: v_min_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x5c]
|
|
0x01,0x05,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x5d]
|
|
0x01,0x05,0xfe,0x5d
|
|
|
|
# CHECK: v_min_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x5c]
|
|
0xff,0x05,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x5c]
|
|
0x01,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x5c]
|
|
0x65,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x5c]
|
|
0x66,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x5c]
|
|
0x67,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x5c]
|
|
0x6a,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x5c]
|
|
0x6b,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x5c]
|
|
0x7c,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x5c]
|
|
0x7e,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x5c]
|
|
0x7f,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x5c]
|
|
0x80,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x5c]
|
|
0xc1,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x5c]
|
|
0xf0,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x5c]
|
|
0xf7,0x04,0x0a,0x5c
|
|
|
|
# CHECK: v_min_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x5c,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x5c,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_min_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x5c,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x5c,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_min_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x5c]
|
|
0x01,0xff,0x0b,0x5c
|
|
|
|
# CHECK: v_max_u16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x5e]
|
|
0x01,0x05,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x5f]
|
|
0x01,0x05,0xfe,0x5f
|
|
|
|
# CHECK: v_max_u16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x5e]
|
|
0xff,0x05,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x5e]
|
|
0x01,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x5e]
|
|
0x65,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x5e]
|
|
0x66,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x5e]
|
|
0x67,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x5e]
|
|
0x6a,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x5e]
|
|
0x6b,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x5e]
|
|
0x7c,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x5e]
|
|
0x7e,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x5e]
|
|
0x7f,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x5e]
|
|
0x80,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x5e]
|
|
0xc1,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x5e,0x00,0x38,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x5e,0x00,0xc4,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x5e
|
|
|
|
# CHECK: v_max_u16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x5e,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x5e,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_max_u16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x5e,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x5e,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_max_u16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x5e]
|
|
0x01,0xff,0x0b,0x5e
|
|
|
|
# CHECK: v_max_i16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x60]
|
|
0x01,0x05,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x61]
|
|
0x01,0x05,0xfe,0x61
|
|
|
|
# CHECK: v_max_i16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x60]
|
|
0xff,0x05,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x60]
|
|
0x01,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x60]
|
|
0x65,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x60]
|
|
0x66,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x60]
|
|
0x67,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x60]
|
|
0x6a,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x60]
|
|
0x6b,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x60]
|
|
0x7c,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x60]
|
|
0x7e,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x60]
|
|
0x7f,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x60]
|
|
0x80,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x60]
|
|
0xc1,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x60,0x00,0x38,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x60,0x00,0xc4,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x60
|
|
|
|
# CHECK: v_max_i16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x60,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x60,0x0b,0xfe,0x00,0x00
|
|
|
|
# CHECK: v_max_i16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x60,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x60,0x56,0x34,0x00,0x00
|
|
|
|
# CHECK: v_max_i16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x60]
|
|
0x01,0xff,0x0b,0x60
|
|
|
|
# CHECK: v_min_u16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x62]
0x01,0x05,0x0a,0x62

# CHECK: v_min_u16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x63]
0x01,0x05,0xfe,0x63

# CHECK: v_min_u16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x62]
0xff,0x05,0x0a,0x62

# CHECK: v_min_u16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x62]
0x01,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x62]
0x65,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x62]
0x66,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x62]
0x67,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x62]
0x6a,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x62]
0x6b,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x62]
0x7c,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x62]
0x7e,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x62]
0x7f,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x62]
0x80,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x62]
0xc1,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x62,0x00,0x38,0x00,0x00]
0xf0,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x62,0x00,0xc4,0x00,0x00]
0xf7,0x04,0x0a,0x62

# CHECK: v_min_u16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x62,0x0b,0xfe,0x00,0x00]
0xff,0x04,0x0a,0x62,0x0b,0xfe,0x00,0x00

# CHECK: v_min_u16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x62,0x56,0x34,0x00,0x00]
0xff,0x04,0x0a,0x62,0x56,0x34,0x00,0x00

# CHECK: v_min_u16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x62]
0x01,0xff,0x0b,0x62

# CHECK: v_min_i16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x64]
0x01,0x05,0x0a,0x64

# CHECK: v_min_i16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x65]
0x01,0x05,0xfe,0x65

# CHECK: v_min_i16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x64]
0xff,0x05,0x0a,0x64

# CHECK: v_min_i16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x64]
0x01,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x64]
0x65,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x64]
0x66,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x64]
0x67,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x64]
0x6a,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x64]
0x6b,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x64]
0x7c,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x64]
0x7e,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x64]
0x7f,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x64]
0x80,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x64]
0xc1,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, 0x3800, v2 ; encoding: [0xff,0x04,0x0a,0x64,0x00,0x38,0x00,0x00]
0xf0,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, 0xc400, v2 ; encoding: [0xff,0x04,0x0a,0x64,0x00,0xc4,0x00,0x00]
0xf7,0x04,0x0a,0x64

# CHECK: v_min_i16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x64,0x0b,0xfe,0x00,0x00]
0xff,0x04,0x0a,0x64,0x0b,0xfe,0x00,0x00

# CHECK: v_min_i16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x64,0x56,0x34,0x00,0x00]
0xff,0x04,0x0a,0x64,0x56,0x34,0x00,0x00

# CHECK: v_min_i16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x64]
0x01,0xff,0x0b,0x64

# CHECK: v_ldexp_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x66]
0x01,0x05,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x67]
0x01,0x05,0xfe,0x67

# CHECK: v_ldexp_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x66]
0xff,0x05,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x66]
0x01,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x66]
0x65,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x66]
0x66,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x66]
0x67,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x66]
0x6a,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x66]
0x6b,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x66]
0x7c,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x66]
0x7e,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x66]
0x7f,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x66]
0x80,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x66]
0xc1,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x66]
0xf0,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x66]
0xf7,0x04,0x0a,0x66

# CHECK: v_ldexp_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x66,0x0b,0xfe,0x00,0x00]
0xff,0x04,0x0a,0x66,0x0b,0xfe,0x00,0x00

# CHECK: v_ldexp_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x66,0x56,0x34,0x00,0x00]
0xff,0x04,0x0a,0x66,0x56,0x34,0x00,0x00

# CHECK: v_ldexp_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x66]
0x01,0xff,0x0b,0x66

# CHECK: v_add_u32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x68]
0x01,0x05,0x0a,0x68

# CHECK: v_add_u32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x69]
0x01,0x05,0xfe,0x69

# CHECK: v_add_u32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x68]
0xff,0x05,0x0a,0x68

# CHECK: v_add_u32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x68]
0x01,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x68]
0x65,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x68]
0x66,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x68]
0x67,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x68]
0x6a,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x68]
0x6b,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x68]
0x7c,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x68]
0x7e,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x68]
0x7f,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x68]
0x80,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x68]
0xc1,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x68]
0xf0,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x68]
0xf7,0x04,0x0a,0x68

# CHECK: v_add_u32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x68,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x68,0x56,0x34,0x12,0xaf

# CHECK: v_add_u32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x68,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x68,0x73,0x72,0x71,0x3f

# CHECK: v_add_u32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x68]
0x01,0xff,0x0b,0x68

# CHECK: v_sub_u32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x6a]
0x01,0x05,0x0a,0x6a

# CHECK: v_sub_u32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x6b]
0x01,0x05,0xfe,0x6b

# CHECK: v_sub_u32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x6a]
0xff,0x05,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x6a]
0x01,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x6a]
0x65,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x6a]
0x66,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x6a]
0x67,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x6a]
0x6a,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x6a]
0x6b,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x6a]
0x7c,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x6a]
0x7e,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x6a]
0x7f,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x6a]
0x80,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x6a]
0xc1,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x6a]
0xf0,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x6a]
0xf7,0x04,0x0a,0x6a

# CHECK: v_sub_u32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x6a,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x6a,0x56,0x34,0x12,0xaf

# CHECK: v_sub_u32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x6a,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x6a,0x73,0x72,0x71,0x3f

# CHECK: v_sub_u32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x6a]
0x01,0xff,0x0b,0x6a

# CHECK: v_subrev_u32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x6c]
0x01,0x05,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x6d]
0x01,0x05,0xfe,0x6d

# CHECK: v_subrev_u32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x6c]
0xff,0x05,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x6c]
0x01,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x6c]
0x65,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, flat_scratch_lo, v2 ; encoding: [0x66,0x04,0x0a,0x6c]
0x66,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, flat_scratch_hi, v2 ; encoding: [0x67,0x04,0x0a,0x6c]
0x67,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x6c]
0x6a,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x6c]
0x6b,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x6c]
0x7c,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x6c]
0x7e,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x6c]
0x7f,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x6c]
0x80,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x6c]
0xc1,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x6c]
0xf0,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x6c]
0xf7,0x04,0x0a,0x6c

# CHECK: v_subrev_u32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x6c,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x6c,0x56,0x34,0x12,0xaf

# CHECK: v_subrev_u32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x6c,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x6c,0x73,0x72,0x71,0x3f

# CHECK: v_subrev_u32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x6c]
0x01,0xff,0x0b,0x6c
