# RUN: llvm-mc -triple=amdgcn -mcpu=gfx1010 -mattr=+wavefrontsize32,-wavefrontsize64 -disassemble -show-encoding < %s | FileCheck -strict-whitespace -check-prefixes=GFX10,W32 %s
# RUN: llvm-mc -triple=amdgcn -mcpu=gfx1010 -mattr=-wavefrontsize32,+wavefrontsize64 -disassemble -show-encoding < %s | FileCheck -strict-whitespace -check-prefixes=GFX10,W64 %s
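# Each case below pairs a FileCheck expectation (W32/W64 where the disassembly
# depends on wavefront size, GFX10 otherwise) with the raw encoding bytes that
# are fed to the disassembler.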
# W32: v_add_co_ci_u32_e32 v255, vcc_lo, v1, v2, vcc_lo ; encoding: [0x01,0x05,0xfe,0x51]
# W64: v_add_co_ci_u32_e32 v255, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0xfe,0x51]
0x01,0x05,0xfe,0x51
# W32: v_add_co_ci_u32_e32 v5, vcc_lo, -1, v2, vcc_lo ; encoding: [0xc1,0x04,0x0a,0x50]
# W64: v_add_co_ci_u32_e32 v5, vcc, -1, v2, vcc ; encoding: [0xc1,0x04,0x0a,0x50]
0xc1,0x04,0x0a,0x50
# W32: v_add_co_ci_u32_e32 v5, vcc_lo, -4.0, v2, vcc_lo ; encoding: [0xf7,0x04,0x0a,0x50]
# W64: v_add_co_ci_u32_e32 v5, vcc, -4.0, v2, vcc ; encoding: [0xf7,0x04,0x0a,0x50]
0xf7,0x04,0x0a,0x50
# W32: v_add_co_ci_u32_e32 v5, vcc_lo, 0, v2, vcc_lo ; encoding: [0x80,0x04,0x0a,0x50]
# W64: v_add_co_ci_u32_e32 v5, vcc, 0, v2, vcc ; encoding: [0x80,0x04,0x0a,0x50]
0x80,0x04,0x0a,0x50
# W32: v_add_co_ci_u32_e32 v5, vcc_lo, 0.5, v2, vcc_lo ; encoding: [0xf0,0x04,0x0a,0x50]
# W64: v_add_co_ci_u32_e32 v5, vcc, 0.5, v2, vcc ; encoding: [0xf0,0x04,0x0a,0x50]
0xf0,0x04,0x0a,0x50
# W32: v_add_co_ci_u32_e32 v5, vcc_lo, v1, v2, vcc_lo ; encoding: [0x01,0x05,0x0a,0x50]
# W64: v_add_co_ci_u32_e32 v5, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0x0a,0x50]
0x01,0x05,0x0a,0x50
# W32: v_add_co_ci_u32_e32 v5, vcc_lo, v1, v255, vcc_lo ; encoding: [0x01,0xff,0x0b,0x50]
# W64: v_add_co_ci_u32_e32 v5, vcc, v1, v255, vcc ; encoding: [0x01,0xff,0x0b,0x50]
0x01,0xff,0x0b,0x50
# W32: v_add_co_ci_u32_e32 v5, vcc_lo, v255, v2, vcc_lo ; encoding: [0xff,0x05,0x0a,0x50]
# W64: v_add_co_ci_u32_e32 v5, vcc, v255, v2, vcc ; encoding: [0xff,0x05,0x0a,0x50]
0xff,0x05,0x0a,0x50
# GFX10: v_add_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x65]
0x01,0x05,0xfe,0x65
# GFX10: v_add_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x64]
0xc1,0x04,0x0a,0x64
# GFX10: v_add_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x64]
0xf7,0x04,0x0a,0x64
# GFX10: v_add_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x64]
0x80,0x04,0x0a,0x64
# GFX10: v_add_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x64]
0xf0,0x04,0x0a,0x64
# GFX10: v_add_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x64,0x56,0x34,0x00,0x00]
0xff,0x04,0x0a,0x64,0x56,0x34,0x00,0x00
# GFX10: v_add_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x64,0x0b,0xfe,0x00,0x00]
0xff,0x04,0x0a,0x64,0x0b,0xfe,0x00,0x00
# GFX10: v_add_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x64]
0x7f,0x04,0x0a,0x64
# GFX10: v_add_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x64]
0x7e,0x04,0x0a,0x64
# GFX10: v_add_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x64]
0x7c,0x04,0x0a,0x64
# GFX10: v_add_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x64]
0x01,0x04,0x0a,0x64
# GFX10: v_add_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x64]
0x65,0x04,0x0a,0x64
# GFX10: v_add_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x64]
0x01,0x05,0x0a,0x64
# GFX10: v_add_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x64]
0x01,0xff,0x0b,0x64
# GFX10: v_add_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x64]
0xff,0x05,0x0a,0x64
# GFX10: v_add_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x64]
0x6b,0x04,0x0a,0x64
# GFX10: v_add_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x64]
0x6a,0x04,0x0a,0x64
# GFX10: v_add_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x07]
0x01,0x05,0xfe,0x07
# GFX10: v_add_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x06]
0xc1,0x04,0x0a,0x06
# GFX10: v_add_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x06]
0xf7,0x04,0x0a,0x06
# GFX10: v_add_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x06]
0x80,0x04,0x0a,0x06
# GFX10: v_add_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x06]
0xf0,0x04,0x0a,0x06
# GFX10: v_add_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x06,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x06,0x73,0x72,0x71,0x3f
# GFX10: v_add_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x06,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x06,0x56,0x34,0x12,0xaf
# GFX10: v_add_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x06]
0x7f,0x04,0x0a,0x06
# GFX10: v_add_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x06]
0x7e,0x04,0x0a,0x06
# GFX10: v_add_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x06]
0x7c,0x04,0x0a,0x06
# GFX10: v_add_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x06]
0x01,0x04,0x0a,0x06
# GFX10: v_add_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x06]
0x65,0x04,0x0a,0x06
# GFX10: v_add_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x06]
0x01,0x05,0x0a,0x06
# GFX10: v_add_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x06]
0x01,0xff,0x0b,0x06
# GFX10: v_add_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x06]
0xff,0x05,0x0a,0x06
# GFX10: v_add_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x06]
0x6b,0x04,0x0a,0x06
# GFX10: v_add_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x06]
0x6a,0x04,0x0a,0x06
# GFX10: v_add_nc_u32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x4b]
0x01,0x05,0xfe,0x4b
# GFX10: v_add_nc_u32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x4a]
0xc1,0x04,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x4a]
0xf7,0x04,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x4a]
0x80,0x04,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x4a]
0xf0,0x04,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x4a,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x4a,0x73,0x72,0x71,0x3f
# GFX10: v_add_nc_u32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x4a,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x4a,0x56,0x34,0x12,0xaf
# GFX10: v_add_nc_u32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x4a]
0x7f,0x04,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x4a]
0x7e,0x04,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x4a]
0x7c,0x04,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x4a]
0x01,0x04,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x4a]
0x65,0x04,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x4a]
0x01,0x05,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x4a]
0x01,0xff,0x0b,0x4a
# GFX10: v_add_nc_u32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x4a]
0xff,0x05,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x4a]
0x6b,0x04,0x0a,0x4a
# GFX10: v_add_nc_u32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x4a]
0x6a,0x04,0x0a,0x4a
# GFX10: v_and_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x37]
|
|
0x01,0x05,0xfe,0x37
|
|
|
|
# GFX10: v_and_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x36]
|
|
0xc1,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x36]
|
|
0xf7,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x36]
|
|
0x80,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x36]
|
|
0xf0,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x36,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x36,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_and_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x36,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x36,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_and_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x36]
|
|
0x7f,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x36]
|
|
0x7e,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x36]
|
|
0x7c,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x36]
|
|
0x01,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x36]
|
|
0x67,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x36]
|
|
0x77,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x36]
|
|
0x01,0x05,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x36]
|
|
0x01,0xff,0x0b,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x36]
|
|
0xff,0x05,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x36]
|
|
0x6b,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_and_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x36]
|
|
0x6a,0x04,0x0a,0x36
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x31]
|
|
0x01,0x05,0xfe,0x31
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x30]
|
|
0xc1,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x30]
|
|
0xf7,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x30]
|
|
0x80,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x30]
|
|
0xf0,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x30,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x30,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x30,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x30,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x30]
|
|
0x7f,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x30]
|
|
0x7e,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x30]
|
|
0x7c,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x30]
|
|
0x01,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x30]
|
|
0x67,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x30]
|
|
0x77,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x30]
|
|
0x01,0x05,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x30]
|
|
0x01,0xff,0x0b,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x30]
|
|
0xff,0x05,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x30]
|
|
0x6b,0x04,0x0a,0x30
|
|
|
|
# GFX10: v_ashrrev_i32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x30]
|
|
0x6a,0x04,0x0a,0x30
|
|
|
|
# W32: v_cndmask_b32_e32 v255, v1, v2, vcc_lo ; encoding: [0x01,0x05,0xfe,0x03]
|
|
# W64: v_cndmask_b32_e32 v255, v1, v2, vcc ; encoding: [0x01,0x05,0xfe,0x03]
|
|
0x01,0x05,0xfe,0x03
|
|
|
|
# W32: v_cndmask_b32_e32 v5, -1, v2, vcc_lo ; encoding: [0xc1,0x04,0x0a,0x02]
|
|
# W64: v_cndmask_b32_e32 v5, -1, v2, vcc ; encoding: [0xc1,0x04,0x0a,0x02]
|
|
0xc1,0x04,0x0a,0x02
|
|
|
|
# W32: v_cndmask_b32_e32 v5, -4.0, v2, vcc_lo ; encoding: [0xf7,0x04,0x0a,0x02]
|
|
# W64: v_cndmask_b32_e32 v5, -4.0, v2, vcc ; encoding: [0xf7,0x04,0x0a,0x02]
|
|
0xf7,0x04,0x0a,0x02
|
|
|
|
# W32: v_cndmask_b32_e32 v5, 0, v2, vcc_lo ; encoding: [0x80,0x04,0x0a,0x02]
|
|
# W64: v_cndmask_b32_e32 v5, 0, v2, vcc ; encoding: [0x80,0x04,0x0a,0x02]
|
|
0x80,0x04,0x0a,0x02
|
|
|
|
# W32: v_cndmask_b32_e32 v5, 0.5, v2, vcc_lo ; encoding: [0xf0,0x04,0x0a,0x02]
|
|
# W64: v_cndmask_b32_e32 v5, 0.5, v2, vcc ; encoding: [0xf0,0x04,0x0a,0x02]
|
|
0xf0,0x04,0x0a,0x02
|
|
|
|
# W32: v_cndmask_b32_e32 v5, v1, v2, vcc_lo ; encoding: [0x01,0x05,0x0a,0x02]
|
|
# W64: v_cndmask_b32_e32 v5, v1, v2, vcc ; encoding: [0x01,0x05,0x0a,0x02]
|
|
0x01,0x05,0x0a,0x02
|
|
|
|
# W32: v_cndmask_b32_e32 v5, v1, v255, vcc_lo ; encoding: [0x01,0xff,0x0b,0x02]
|
|
# W64: v_cndmask_b32_e32 v5, v1, v255, vcc ; encoding: [0x01,0xff,0x0b,0x02]
|
|
0x01,0xff,0x0b,0x02
|
|
|
|
# W32: v_cndmask_b32_e32 v5, v255, v2, vcc_lo ; encoding: [0xff,0x05,0x0a,0x02]
|
|
# W64: v_cndmask_b32_e32 v5, v255, v2, vcc ; encoding: [0xff,0x05,0x0a,0x02]
|
|
0xff,0x05,0x0a,0x02
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x5f]
|
|
0x01,0x05,0xfe,0x5f
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x5e]
|
|
0xc1,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x5e]
|
|
0xf7,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x5e]
|
|
0x80,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x5e]
|
|
0xf0,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x5e,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x5e,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x5e,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x5e,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x5e]
|
|
0x7f,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x5e]
|
|
0x7e,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x5e]
|
|
0x7c,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x5e]
|
|
0x01,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x5e]
|
|
0x67,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x5e]
|
|
0x77,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x5e]
|
|
0x01,0x05,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x5e]
|
|
0x01,0xff,0x0b,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x5e]
|
|
0xff,0x05,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x5e]
|
|
0x6b,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_cvt_pkrtz_f16_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x5e]
|
|
0x6a,0x04,0x0a,0x5e
|
|
|
|
# GFX10: v_fmaak_f16 v255, v1, v2, 0x1121 ; encoding: [0x01,0x05,0xfe,0x71,0x21,0x11,0x00,0x00]
|
|
0x01,0x05,0xfe,0x71,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmaak_f32 v255, v1, v2, 0x11213141 ; encoding: [0x01,0x05,0xfe,0x5b,0x41,0x31,0x21,0x11]
|
|
0x01,0x05,0xfe,0x5b,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmaak_f16 v5, -1, v2, 0x1121 ; encoding: [0xc1,0x04,0x0a,0x70,0x21,0x11,0x00,0x00]
|
|
0xc1,0x04,0x0a,0x70,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmaak_f16 v5, 0x1121, v2, 0x1121 ; encoding: [0xff,0x04,0x0a,0x70,0x21,0x11,0x00,0x00]
|
|
0xff,0x04,0x0a,0x70,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmaak_f32 v5, -1, v2, 0x11213141 ; encoding: [0xc1,0x04,0x0a,0x5a,0x41,0x31,0x21,0x11]
|
|
0xc1,0x04,0x0a,0x5a,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmaak_f16 v5, -4.0, v2, 0x1121 ; encoding: [0xf7,0x04,0x0a,0x70,0x21,0x11,0x00,0x00]
|
|
0xf7,0x04,0x0a,0x70,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmaak_f32 v5, -4.0, v2, 0x11213141 ; encoding: [0xf7,0x04,0x0a,0x5a,0x41,0x31,0x21,0x11]
|
|
0xf7,0x04,0x0a,0x5a,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmaak_f16 v5, 0, v2, 0x1121 ; encoding: [0x80,0x04,0x0a,0x70,0x21,0x11,0x00,0x00]
|
|
0x80,0x04,0x0a,0x70,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmaak_f32 v5, 0, v2, 0x11213141 ; encoding: [0x80,0x04,0x0a,0x5a,0x41,0x31,0x21,0x11]
|
|
0x80,0x04,0x0a,0x5a,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmaak_f32 v5, 0x11213141, v2, 0x11213141 ; encoding: [0xff,0x04,0x0a,0x5a,0x41,0x31,0x21,0x11]
|
|
0xff,0x04,0x0a,0x5a,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmaak_f16 v5, 0.5, v2, 0x1121 ; encoding: [0xf0,0x04,0x0a,0x70,0x21,0x11,0x00,0x00]
|
|
0xf0,0x04,0x0a,0x70,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmaak_f32 v5, 0.5, v2, 0x11213141 ; encoding: [0xf0,0x04,0x0a,0x5a,0x41,0x31,0x21,0x11]
|
|
0xf0,0x04,0x0a,0x5a,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmaak_f16 v5, v1, v2, 0x1121 ; encoding: [0x01,0x05,0x0a,0x70,0x21,0x11,0x00,0x00]
|
|
0x01,0x05,0x0a,0x70,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmaak_f32 v5, v1, v2, 0x11213141 ; encoding: [0x01,0x05,0x0a,0x5a,0x41,0x31,0x21,0x11]
|
|
0x01,0x05,0x0a,0x5a,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmaak_f16 v5, v1, v2, 0xa1b1 ; encoding: [0x01,0x05,0x0a,0x70,0xb1,0xa1,0x00,0x00]
|
|
0x01,0x05,0x0a,0x70,0xb1,0xa1,0x00,0x00
|
|
|
|
# GFX10: v_fmaak_f32 v5, v1, v2, 0xa1b1c1d1 ; encoding: [0x01,0x05,0x0a,0x5a,0xd1,0xc1,0xb1,0xa1]
|
|
0x01,0x05,0x0a,0x5a,0xd1,0xc1,0xb1,0xa1
|
|
|
|
# GFX10: v_fmaak_f16 v5, v1, v255, 0x1121 ; encoding: [0x01,0xff,0x0b,0x70,0x21,0x11,0x00,0x00]
|
|
0x01,0xff,0x0b,0x70,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmaak_f32 v5, v1, v255, 0x11213141 ; encoding: [0x01,0xff,0x0b,0x5a,0x41,0x31,0x21,0x11]
|
|
0x01,0xff,0x0b,0x5a,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmaak_f16 v5, v255, v2, 0x1121 ; encoding: [0xff,0x05,0x0a,0x70,0x21,0x11,0x00,0x00]
|
|
0xff,0x05,0x0a,0x70,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmaak_f32 v5, v255, v2, 0x11213141 ; encoding: [0xff,0x05,0x0a,0x5a,0x41,0x31,0x21,0x11]
|
|
0xff,0x05,0x0a,0x5a,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmac_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x6d]
|
|
0x01,0x05,0xfe,0x6d
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x6c]
|
|
0xc1,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x6c]
|
|
0xf7,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x6c]
|
|
0x80,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x6c]
|
|
0xf0,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, 0x1234, v2 ; encoding: [0xff,0x04,0x0a,0x6c,0x34,0x12,0x00,0x00]
|
|
0xff,0x04,0x0a,0x6c,0x34,0x12,0x00,0x00
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x6c]
|
|
0x7f,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x6c]
|
|
0x7e,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x6c]
|
|
0x7c,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x6c]
|
|
0x01,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x6c]
|
|
0x67,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x6c]
|
|
0x77,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x6c]
|
|
0x01,0x05,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x6c]
|
|
0x01,0xff,0x0b,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x6c]
|
|
0xff,0x05,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x6c]
|
|
0x6b,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x6c]
|
|
0x6a,0x04,0x0a,0x6c
|
|
|
|
# GFX10: v_fmac_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x57]
|
|
0x01,0x05,0xfe,0x57
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x56]
|
|
0xc1,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x56]
|
|
0xf7,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x56]
|
|
0x80,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x56]
|
|
0xf0,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x56,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x56,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x56,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x56,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x56]
|
|
0x7f,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x56]
|
|
0x7e,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x56]
|
|
0x7c,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x56]
|
|
0x01,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x56]
|
|
0x67,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x56]
|
|
0x77,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x56]
|
|
0x01,0x05,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x56]
|
|
0x01,0xff,0x0b,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x56]
|
|
0xff,0x05,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x56]
|
|
0x6b,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmac_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x56]
|
|
0x6a,0x04,0x0a,0x56
|
|
|
|
# GFX10: v_fmamk_f16 v255, v1, 0x1121, v3 ; encoding: [0x01,0x07,0xfe,0x6f,0x21,0x11,0x00,0x00]
|
|
0x01,0x07,0xfe,0x6f,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmamk_f16 v255, 0x1121, 0x1121, v3 ; encoding: [0xff,0x06,0xfe,0x6f,0x21,0x11,0x00,0x00]
|
|
0xff,0x06,0xfe,0x6f,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmamk_f32 v255, v1, 0x11213141, v3 ; encoding: [0x01,0x07,0xfe,0x59,0x41,0x31,0x21,0x11]
|
|
0x01,0x07,0xfe,0x59,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmamk_f16 v5, -1, 0x1121, v3 ; encoding: [0xc1,0x06,0x0a,0x6e,0x21,0x11,0x00,0x00]
|
|
0xc1,0x06,0x0a,0x6e,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmamk_f32 v5, -1, 0x11213141, v3 ; encoding: [0xc1,0x06,0x0a,0x58,0x41,0x31,0x21,0x11]
|
|
0xc1,0x06,0x0a,0x58,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmamk_f32 v5, 0x11213141, 0x11213141, v3 ; encoding: [0xff,0x06,0x0a,0x58,0x41,0x31,0x21,0x11]
|
|
0xff,0x06,0x0a,0x58,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmamk_f16 v5, -4.0, 0x1121, v3 ; encoding: [0xf7,0x06,0x0a,0x6e,0x21,0x11,0x00,0x00]
|
|
0xf7,0x06,0x0a,0x6e,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmamk_f32 v5, -4.0, 0x11213141, v3 ; encoding: [0xf7,0x06,0x0a,0x58,0x41,0x31,0x21,0x11]
|
|
0xf7,0x06,0x0a,0x58,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmamk_f16 v5, 0, 0x1121, v3 ; encoding: [0x80,0x06,0x0a,0x6e,0x21,0x11,0x00,0x00]
|
|
0x80,0x06,0x0a,0x6e,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmamk_f32 v5, 0, 0x11213141, v3 ; encoding: [0x80,0x06,0x0a,0x58,0x41,0x31,0x21,0x11]
|
|
0x80,0x06,0x0a,0x58,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmamk_f16 v5, 0.5, 0x1121, v3 ; encoding: [0xf0,0x06,0x0a,0x6e,0x21,0x11,0x00,0x00]
|
|
0xf0,0x06,0x0a,0x6e,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmamk_f32 v5, 0.5, 0x11213141, v3 ; encoding: [0xf0,0x06,0x0a,0x58,0x41,0x31,0x21,0x11]
|
|
0xf0,0x06,0x0a,0x58,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmamk_f16 v5, v1, 0x1121, v255 ; encoding: [0x01,0xff,0x0b,0x6e,0x21,0x11,0x00,0x00]
|
|
0x01,0xff,0x0b,0x6e,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmamk_f16 v5, v1, 0x1121, v3 ; encoding: [0x01,0x07,0x0a,0x6e,0x21,0x11,0x00,0x00]
|
|
0x01,0x07,0x0a,0x6e,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmamk_f32 v5, v1, 0x11213141, v255 ; encoding: [0x01,0xff,0x0b,0x58,0x41,0x31,0x21,0x11]
|
|
0x01,0xff,0x0b,0x58,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmamk_f32 v5, v1, 0x11213141, v3 ; encoding: [0x01,0x07,0x0a,0x58,0x41,0x31,0x21,0x11]
|
|
0x01,0x07,0x0a,0x58,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_fmamk_f16 v5, v1, 0xa1b1, v3 ; encoding: [0x01,0x07,0x0a,0x6e,0xb1,0xa1,0x00,0x00]
|
|
0x01,0x07,0x0a,0x6e,0xb1,0xa1,0x00,0x00
|
|
|
|
# GFX10: v_fmamk_f32 v5, v1, 0xa1b1c1d1, v3 ; encoding: [0x01,0x07,0x0a,0x58,0xd1,0xc1,0xb1,0xa1]
|
|
0x01,0x07,0x0a,0x58,0xd1,0xc1,0xb1,0xa1
|
|
|
|
# GFX10: v_fmamk_f16 v5, v255, 0x1121, v3 ; encoding: [0xff,0x07,0x0a,0x6e,0x21,0x11,0x00,0x00]
|
|
0xff,0x07,0x0a,0x6e,0x21,0x11,0x00,0x00
|
|
|
|
# GFX10: v_fmamk_f32 v5, v255, 0x11213141, v3 ; encoding: [0xff,0x07,0x0a,0x58,0x41,0x31,0x21,0x11]
|
|
0xff,0x07,0x0a,0x58,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_ldexp_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x77]
|
|
0x01,0x05,0xfe,0x77
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x76]
|
|
0xc1,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x76]
|
|
0xf7,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x76]
|
|
0x80,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x76]
|
|
0xf0,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x76,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x76,0x56,0x34,0x00,0x00
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x76,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x76,0x0b,0xfe,0x00,0x00
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x76]
|
|
0x7f,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x76]
|
|
0x7e,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x76]
|
|
0x7c,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x76]
|
|
0x01,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x76]
|
|
0x65,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x76]
|
|
0x01,0x05,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x76]
|
|
0x01,0xff,0x0b,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x76]
|
|
0xff,0x05,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x76]
|
|
0x6b,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_ldexp_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x76]
|
|
0x6a,0x04,0x0a,0x76
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x35]
|
|
0x01,0x05,0xfe,0x35
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x34]
|
|
0xc1,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x34]
|
|
0xf7,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x34]
|
|
0x80,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x34]
|
|
0xf0,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x34,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x34,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x34,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x34,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x34]
|
|
0x7f,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x34]
|
|
0x7e,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x34]
|
|
0x7c,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x34]
|
|
0x01,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x34]
|
|
0x67,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x34]
|
|
0x77,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x34]
|
|
0x01,0x05,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x34]
|
|
0x01,0xff,0x0b,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x34]
|
|
0xff,0x05,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x34]
|
|
0x6b,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshlrev_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x34]
|
|
0x6a,0x04,0x0a,0x34
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x2d]
|
|
0x01,0x05,0xfe,0x2d
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x2c]
|
|
0xc1,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x2c]
|
|
0xf7,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x2c]
|
|
0x80,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x2c]
|
|
0xf0,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x2c,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x2c,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x2c,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x2c,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x2c]
|
|
0x7f,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x2c]
|
|
0x7e,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x2c]
|
|
0x7c,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x2c]
|
|
0x01,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x2c]
|
|
0x67,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x2c]
|
|
0x77,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x2c]
|
|
0x01,0x05,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x2c]
|
|
0x01,0xff,0x0b,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x2c]
|
|
0xff,0x05,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x2c]
|
|
0x6b,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_lshrrev_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x2c]
|
|
0x6a,0x04,0x0a,0x2c
|
|
|
|
# GFX10: v_mac_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x3f]
|
|
0x01,0x05,0xfe,0x3f
|
|
|
|
# GFX10: v_mac_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x3e]
|
|
0xc1,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x3e]
|
|
0xf7,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x3e]
|
|
0x80,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x3e]
|
|
0xf0,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x3e,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x3e,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_mac_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x3e,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x3e,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_mac_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x3e]
|
|
0x7f,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x3e]
|
|
0x7e,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x3e]
|
|
0x7c,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x3e]
|
|
0x01,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x3e]
|
|
0x67,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x3e]
|
|
0x77,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x3e]
|
|
0x01,0x05,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x3e]
|
|
0x01,0xff,0x0b,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x3e]
|
|
0xff,0x05,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x3e]
|
|
0x6b,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x3e]
|
|
0x6a,0x04,0x0a,0x3e
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x0d]
|
|
0x01,0x05,0xfe,0x0d
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x0c]
|
|
0xc1,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x0c]
|
|
0xf7,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x0c]
|
|
0x80,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x0c]
|
|
0xf0,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x0c,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x0c,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x0c,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x0c,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x0c]
|
|
0x7f,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x0c]
|
|
0x7e,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x0c]
|
|
0x7c,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x0c]
|
|
0x01,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x0c]
|
|
0x67,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x0c]
|
|
0x77,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x0c]
|
|
0x01,0x05,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x0c]
|
|
0x01,0xff,0x0b,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x0c]
|
|
0xff,0x05,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x0c]
|
|
0x6b,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_mac_legacy_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x0c]
|
|
0x6a,0x04,0x0a,0x0c
|
|
|
|
# GFX10: v_madak_f32 v255, v1, v2, 0x11213141 ; encoding: [0x01,0x05,0xfe,0x43,0x41,0x31,0x21,0x11]
|
|
0x01,0x05,0xfe,0x43,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madak_f32 v5, -1, v2, 0x11213141 ; encoding: [0xc1,0x04,0x0a,0x42,0x41,0x31,0x21,0x11]
|
|
0xc1,0x04,0x0a,0x42,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madak_f32 v5, -4.0, v2, 0x11213141 ; encoding: [0xf7,0x04,0x0a,0x42,0x41,0x31,0x21,0x11]
|
|
0xf7,0x04,0x0a,0x42,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madak_f32 v5, 0, v2, 0x11213141 ; encoding: [0x80,0x04,0x0a,0x42,0x41,0x31,0x21,0x11]
|
|
0x80,0x04,0x0a,0x42,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madak_f32 v5, 0.5, v2, 0x11213141 ; encoding: [0xf0,0x04,0x0a,0x42,0x41,0x31,0x21,0x11]
|
|
0xf0,0x04,0x0a,0x42,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madak_f32 v5, v1, v2, 0x11213141 ; encoding: [0x01,0x05,0x0a,0x42,0x41,0x31,0x21,0x11]
|
|
0x01,0x05,0x0a,0x42,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madak_f32 v5, v1, v2, 0xa1b1c1d1 ; encoding: [0x01,0x05,0x0a,0x42,0xd1,0xc1,0xb1,0xa1]
|
|
0x01,0x05,0x0a,0x42,0xd1,0xc1,0xb1,0xa1
|
|
|
|
# GFX10: v_madak_f32 v5, v1, v255, 0x11213141 ; encoding: [0x01,0xff,0x0b,0x42,0x41,0x31,0x21,0x11]
|
|
0x01,0xff,0x0b,0x42,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madak_f32 v5, v255, v2, 0x11213141 ; encoding: [0xff,0x05,0x0a,0x42,0x41,0x31,0x21,0x11]
|
|
0xff,0x05,0x0a,0x42,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madmk_f32 v255, v1, 0x11213141, v3 ; encoding: [0x01,0x07,0xfe,0x41,0x41,0x31,0x21,0x11]
|
|
0x01,0x07,0xfe,0x41,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madmk_f32 v5, -1, 0x11213141, v3 ; encoding: [0xc1,0x06,0x0a,0x40,0x41,0x31,0x21,0x11]
|
|
0xc1,0x06,0x0a,0x40,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madmk_f32 v5, -4.0, 0x11213141, v3 ; encoding: [0xf7,0x06,0x0a,0x40,0x41,0x31,0x21,0x11]
|
|
0xf7,0x06,0x0a,0x40,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madmk_f32 v5, 0, 0x11213141, v3 ; encoding: [0x80,0x06,0x0a,0x40,0x41,0x31,0x21,0x11]
|
|
0x80,0x06,0x0a,0x40,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madmk_f32 v5, 0.5, 0x11213141, v3 ; encoding: [0xf0,0x06,0x0a,0x40,0x41,0x31,0x21,0x11]
|
|
0xf0,0x06,0x0a,0x40,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madmk_f32 v5, v1, 0x11213141, v255 ; encoding: [0x01,0xff,0x0b,0x40,0x41,0x31,0x21,0x11]
|
|
0x01,0xff,0x0b,0x40,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madmk_f32 v5, v1, 0x11213141, v3 ; encoding: [0x01,0x07,0x0a,0x40,0x41,0x31,0x21,0x11]
|
|
0x01,0x07,0x0a,0x40,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_madmk_f32 v5, v1, 0xa1b1c1d1, v3 ; encoding: [0x01,0x07,0x0a,0x40,0xd1,0xc1,0xb1,0xa1]
|
|
0x01,0x07,0x0a,0x40,0xd1,0xc1,0xb1,0xa1
|
|
|
|
# GFX10: v_madmk_f32 v5, v255, 0x11213141, v3 ; encoding: [0xff,0x07,0x0a,0x40,0x41,0x31,0x21,0x11]
|
|
0xff,0x07,0x0a,0x40,0x41,0x31,0x21,0x11
|
|
|
|
# GFX10: v_max_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x73]
|
|
0x01,0x05,0xfe,0x73
|
|
|
|
# GFX10: v_max_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x72]
|
|
0xc1,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x72]
|
|
0xf7,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x72]
|
|
0x80,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x72]
|
|
0xf0,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x72,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x72,0x56,0x34,0x00,0x00
|
|
|
|
# GFX10: v_max_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x72,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x72,0x0b,0xfe,0x00,0x00
|
|
|
|
# GFX10: v_max_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x72]
|
|
0x7f,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x72]
|
|
0x7e,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x72]
|
|
0x7c,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x72]
|
|
0x01,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x72]
|
|
0x65,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x72]
|
|
0x01,0x05,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x72]
|
|
0x01,0xff,0x0b,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x72]
|
|
0xff,0x05,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x72]
|
|
0x6b,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x72]
|
|
0x6a,0x04,0x0a,0x72
|
|
|
|
# GFX10: v_max_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x21]
|
|
0x01,0x05,0xfe,0x21
|
|
|
|
# GFX10: v_max_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x20]
|
|
0xc1,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x20]
|
|
0xf7,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x20]
|
|
0x80,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x20]
|
|
0xf0,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x20,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x20,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_max_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x20,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x20,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_max_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x20]
|
|
0x7f,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x20]
|
|
0x7e,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x20]
|
|
0x7c,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x20]
|
|
0x01,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x20]
|
|
0x67,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x20]
|
|
0x77,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x20]
|
|
0x01,0x05,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x20]
|
|
0x01,0xff,0x0b,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x20]
|
|
0xff,0x05,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x20]
|
|
0x6b,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x20]
|
|
0x6a,0x04,0x0a,0x20
|
|
|
|
# GFX10: v_max_i32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x25]
|
|
0x01,0x05,0xfe,0x25
|
|
|
|
# GFX10: v_max_i32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x24]
|
|
0xc1,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x24]
|
|
0xf7,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x24]
|
|
0x80,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x24]
|
|
0xf0,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x24,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x24,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_max_i32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x24,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x24,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_max_i32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x24]
|
|
0x7f,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x24]
|
|
0x7e,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x24]
|
|
0x7c,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x24]
|
|
0x01,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x24]
|
|
0x67,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x24]
|
|
0x77,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x24]
|
|
0x01,0x05,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x24]
|
|
0x01,0xff,0x0b,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x24]
|
|
0xff,0x05,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x24]
|
|
0x6b,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_i32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x24]
|
|
0x6a,0x04,0x0a,0x24
|
|
|
|
# GFX10: v_max_u32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x29]
|
|
0x01,0x05,0xfe,0x29
|
|
|
|
# GFX10: v_max_u32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x28]
|
|
0xc1,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x28]
|
|
0xf7,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x28]
|
|
0x80,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x28]
|
|
0xf0,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x28,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x28,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_max_u32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x28,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x28,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_max_u32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x28]
|
|
0x7f,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x28]
|
|
0x7e,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x28]
|
|
0x7c,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x28]
|
|
0x01,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x28]
|
|
0x67,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x28]
|
|
0x77,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x28]
|
|
0x01,0x05,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x28]
|
|
0x01,0xff,0x0b,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x28]
|
|
0xff,0x05,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x28]
|
|
0x6b,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_max_u32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x28]
|
|
0x6a,0x04,0x0a,0x28
|
|
|
|
# GFX10: v_min_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x75]
|
|
0x01,0x05,0xfe,0x75
|
|
|
|
# GFX10: v_min_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x74]
|
|
0xc1,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x74]
|
|
0xf7,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x74]
|
|
0x80,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x74]
|
|
0xf0,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x74,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x74,0x56,0x34,0x00,0x00
|
|
|
|
# GFX10: v_min_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x74,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x74,0x0b,0xfe,0x00,0x00
|
|
|
|
# GFX10: v_min_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x74]
|
|
0x7f,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x74]
|
|
0x7e,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x74]
|
|
0x7c,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x74]
|
|
0x01,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x74]
|
|
0x65,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x74]
|
|
0x01,0x05,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x74]
|
|
0x01,0xff,0x0b,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x74]
|
|
0xff,0x05,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x74]
|
|
0x6b,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x74]
|
|
0x6a,0x04,0x0a,0x74
|
|
|
|
# GFX10: v_min_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x1f]
|
|
0x01,0x05,0xfe,0x1f
|
|
|
|
# GFX10: v_min_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x1e]
|
|
0xc1,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x1e]
|
|
0xf7,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x1e]
|
|
0x80,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x1e]
|
|
0xf0,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x1e,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x1e,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_min_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x1e,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x1e,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_min_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x1e]
|
|
0x7f,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x1e]
|
|
0x7e,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x1e]
|
|
0x7c,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x1e]
|
|
0x01,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x1e]
|
|
0x67,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x1e]
|
|
0x77,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x1e]
|
|
0x01,0x05,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x1e]
|
|
0x01,0xff,0x0b,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x1e]
|
|
0xff,0x05,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x1e]
|
|
0x6b,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x1e]
|
|
0x6a,0x04,0x0a,0x1e
|
|
|
|
# GFX10: v_min_i32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x23]
|
|
0x01,0x05,0xfe,0x23
|
|
|
|
# GFX10: v_min_i32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x22]
|
|
0xc1,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x22]
|
|
0xf7,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x22]
|
|
0x80,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x22]
|
|
0xf0,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x22,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x22,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_min_i32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x22,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x22,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_min_i32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x22]
|
|
0x7f,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x22]
|
|
0x7e,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x22]
|
|
0x7c,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x22]
|
|
0x01,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x22]
|
|
0x67,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x22]
|
|
0x77,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x22]
|
|
0x01,0x05,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x22]
|
|
0x01,0xff,0x0b,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x22]
|
|
0xff,0x05,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x22]
|
|
0x6b,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_i32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x22]
|
|
0x6a,0x04,0x0a,0x22
|
|
|
|
# GFX10: v_min_u32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x27]
|
|
0x01,0x05,0xfe,0x27
|
|
|
|
# GFX10: v_min_u32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x26]
|
|
0xc1,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x26]
|
|
0xf7,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x26]
|
|
0x80,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x26]
|
|
0xf0,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x26,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x26,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_min_u32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x26,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x26,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_min_u32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x26]
|
|
0x7f,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x26]
|
|
0x7e,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x26]
|
|
0x7c,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x26]
|
|
0x01,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x26]
|
|
0x67,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x26]
|
|
0x77,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x26]
|
|
0x01,0x05,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x26]
|
|
0x01,0xff,0x0b,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x26]
|
|
0xff,0x05,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x26]
|
|
0x6b,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_min_u32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x26]
|
|
0x6a,0x04,0x0a,0x26
|
|
|
|
# GFX10: v_mul_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x6b]
|
|
0x01,0x05,0xfe,0x6b
|
|
|
|
# GFX10: v_mul_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x6a]
|
|
0xc1,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x6a]
|
|
0xf7,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x6a]
|
|
0x80,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x6a]
|
|
0xf0,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x6a,0x56,0x34,0x00,0x00]
|
|
0xff,0x04,0x0a,0x6a,0x56,0x34,0x00,0x00
|
|
|
|
# GFX10: v_mul_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x6a,0x0b,0xfe,0x00,0x00]
|
|
0xff,0x04,0x0a,0x6a,0x0b,0xfe,0x00,0x00
|
|
|
|
# GFX10: v_mul_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x6a]
|
|
0x7f,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x6a]
|
|
0x7e,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x6a]
|
|
0x7c,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x6a]
|
|
0x01,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x6a]
|
|
0x65,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x6a]
|
|
0x01,0x05,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x6a]
|
|
0x01,0xff,0x0b,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x6a]
|
|
0xff,0x05,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x6a]
|
|
0x6b,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x6a]
|
|
0x6a,0x04,0x0a,0x6a
|
|
|
|
# GFX10: v_mul_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x11]
|
|
0x01,0x05,0xfe,0x11
|
|
|
|
# GFX10: v_mul_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x10]
|
|
0xc1,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x10]
|
|
0xf7,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x10]
|
|
0x80,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x10]
|
|
0xf0,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x10,0x73,0x72,0x71,0x3f]
|
|
0xff,0x04,0x0a,0x10,0x73,0x72,0x71,0x3f
|
|
|
|
# GFX10: v_mul_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x10,0x56,0x34,0x12,0xaf]
|
|
0xff,0x04,0x0a,0x10,0x56,0x34,0x12,0xaf
|
|
|
|
# GFX10: v_mul_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x10]
|
|
0x7f,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x10]
|
|
0x7e,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x10]
|
|
0x7c,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x10]
|
|
0x01,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x10]
|
|
0x67,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x10]
|
|
0x77,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x10]
|
|
0x01,0x05,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x10]
|
|
0x01,0xff,0x0b,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x10]
|
|
0xff,0x05,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x10]
|
|
0x6b,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x10]
|
|
0x6a,0x04,0x0a,0x10
|
|
|
|
# GFX10: v_mul_hi_i32_i24_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x15]
0x01,0x05,0xfe,0x15

# GFX10: v_mul_hi_i32_i24_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x14]
0xc1,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x14]
0xf7,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x14]
0x80,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x14]
0xf0,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x14,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x14,0x73,0x72,0x71,0x3f

# GFX10: v_mul_hi_i32_i24_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x14,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x14,0x56,0x34,0x12,0xaf

# GFX10: v_mul_hi_i32_i24_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x14]
0x7f,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x14]
0x7e,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x14]
0x7c,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x14]
0x01,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x14]
0x67,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x14]
0x77,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x14]
0x01,0x05,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x14]
0x01,0xff,0x0b,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x14]
0xff,0x05,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x14]
0x6b,0x04,0x0a,0x14

# GFX10: v_mul_hi_i32_i24_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x14]
0x6a,0x04,0x0a,0x14

# GFX10: v_mul_hi_u32_u24_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x19]
0x01,0x05,0xfe,0x19

# GFX10: v_mul_hi_u32_u24_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x18]
0xc1,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x18]
0xf7,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x18]
0x80,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x18]
0xf0,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x18,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x18,0x73,0x72,0x71,0x3f

# GFX10: v_mul_hi_u32_u24_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x18,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x18,0x56,0x34,0x12,0xaf

# GFX10: v_mul_hi_u32_u24_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x18]
0x7f,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x18]
0x7e,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x18]
0x7c,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x18]
0x01,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x18]
0x67,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x18]
0x77,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x18]
0x01,0x05,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x18]
0x01,0xff,0x0b,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x18]
0xff,0x05,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x18]
0x6b,0x04,0x0a,0x18

# GFX10: v_mul_hi_u32_u24_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x18]
0x6a,0x04,0x0a,0x18

# GFX10: v_mul_i32_i24_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x13]
0x01,0x05,0xfe,0x13

# GFX10: v_mul_i32_i24_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x12]
0xc1,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x12]
0xf7,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x12]
0x80,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x12]
0xf0,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x12,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x12,0x73,0x72,0x71,0x3f

# GFX10: v_mul_i32_i24_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x12,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x12,0x56,0x34,0x12,0xaf

# GFX10: v_mul_i32_i24_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x12]
0x7f,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x12]
0x7e,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x12]
0x7c,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x12]
0x01,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x12]
0x67,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x12]
0x77,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x12]
0x01,0x05,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x12]
0x01,0xff,0x0b,0x12

# GFX10: v_mul_i32_i24_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x12]
0xff,0x05,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x12]
0x6b,0x04,0x0a,0x12

# GFX10: v_mul_i32_i24_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x12]
0x6a,0x04,0x0a,0x12

# GFX10: v_mul_legacy_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x0f]
0x01,0x05,0xfe,0x0f

# GFX10: v_mul_legacy_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x0e]
0xc1,0x04,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x0e]
0xf7,0x04,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x0e]
0x80,0x04,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x0e]
0xf0,0x04,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x0e,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x0e,0x73,0x72,0x71,0x3f

# GFX10: v_mul_legacy_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x0e,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x0e,0x56,0x34,0x12,0xaf

# GFX10: v_mul_legacy_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x0e]
0x7f,0x04,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x0e]
0x7e,0x04,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x0e]
0x7c,0x04,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x0e]
0x01,0x04,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x0e]
0x65,0x04,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x0e]
0x01,0x05,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x0e]
0x01,0xff,0x0b,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x0e]
0xff,0x05,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x0e]
0x6b,0x04,0x0a,0x0e

# GFX10: v_mul_legacy_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x0e]
0x6a,0x04,0x0a,0x0e

# GFX10: v_mul_u32_u24_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x17]
0x01,0x05,0xfe,0x17

# GFX10: v_mul_u32_u24_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x16]
0xc1,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x16]
0xf7,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x16]
0x80,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x16]
0xf0,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x16,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x16,0x73,0x72,0x71,0x3f

# GFX10: v_mul_u32_u24_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x16,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x16,0x56,0x34,0x12,0xaf

# GFX10: v_mul_u32_u24_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x16]
0x7f,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x16]
0x7e,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x16]
0x7c,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x16]
0x01,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x16]
0x67,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x16]
0x77,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x16]
0x01,0x05,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x16]
0x01,0xff,0x0b,0x16

# GFX10: v_mul_u32_u24_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x16]
0xff,0x05,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x16]
0x6b,0x04,0x0a,0x16

# GFX10: v_mul_u32_u24_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x16]
0x6a,0x04,0x0a,0x16

# GFX10: v_or_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x39]
0x01,0x05,0xfe,0x39

# GFX10: v_or_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x38]
0xc1,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x38]
0xf7,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x38]
0x80,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x38]
0xf0,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x38,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x38,0x73,0x72,0x71,0x3f

# GFX10: v_or_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x38,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x38,0x56,0x34,0x12,0xaf

# GFX10: v_or_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x38]
0x7f,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x38]
0x7e,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x38]
0x7c,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x38]
0x01,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x38]
0x67,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x38]
0x77,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x38]
0x01,0x05,0x0a,0x38

# GFX10: v_or_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x38]
0x01,0xff,0x0b,0x38

# GFX10: v_or_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x38]
0xff,0x05,0x0a,0x38

# GFX10: v_or_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x38]
0x6b,0x04,0x0a,0x38

# GFX10: v_or_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x38]
0x6a,0x04,0x0a,0x38

# GFX10: v_pk_fmac_f16 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x79]
0x01,0x05,0xfe,0x79

# GFX10: v_pk_fmac_f16 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x78]
0xc1,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x78]
0xf7,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x78]
0x80,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x78]
0xf0,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x78]
0x7f,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x78]
0x7e,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x78]
0x7c,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x78]
0x01,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x78]
0x67,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x78]
0x77,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x78]
0x01,0x05,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x78]
0x01,0xff,0x0b,0x78

# GFX10: v_pk_fmac_f16 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x78]
0xff,0x05,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x78]
0x6b,0x04,0x0a,0x78

# GFX10: v_pk_fmac_f16 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x78]
0x6a,0x04,0x0a,0x78

# W32: v_sub_co_ci_u32_e32 v255, vcc_lo, v1, v2, vcc_lo ; encoding: [0x01,0x05,0xfe,0x53]
# W64: v_sub_co_ci_u32_e32 v255, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0xfe,0x53]
0x01,0x05,0xfe,0x53

# W32: v_sub_co_ci_u32_e32 v5, vcc_lo, -1, v2, vcc_lo ; encoding: [0xc1,0x04,0x0a,0x52]
# W64: v_sub_co_ci_u32_e32 v5, vcc, -1, v2, vcc ; encoding: [0xc1,0x04,0x0a,0x52]
0xc1,0x04,0x0a,0x52

# W32: v_sub_co_ci_u32_e32 v5, vcc_lo, -4.0, v2, vcc_lo ; encoding: [0xf7,0x04,0x0a,0x52]
# W64: v_sub_co_ci_u32_e32 v5, vcc, -4.0, v2, vcc ; encoding: [0xf7,0x04,0x0a,0x52]
0xf7,0x04,0x0a,0x52

# W32: v_sub_co_ci_u32_e32 v5, vcc_lo, 0, v2, vcc_lo ; encoding: [0x80,0x04,0x0a,0x52]
# W64: v_sub_co_ci_u32_e32 v5, vcc, 0, v2, vcc ; encoding: [0x80,0x04,0x0a,0x52]
0x80,0x04,0x0a,0x52

# W32: v_sub_co_ci_u32_e32 v5, vcc_lo, 0.5, v2, vcc_lo ; encoding: [0xf0,0x04,0x0a,0x52]
# W64: v_sub_co_ci_u32_e32 v5, vcc, 0.5, v2, vcc ; encoding: [0xf0,0x04,0x0a,0x52]
0xf0,0x04,0x0a,0x52

# W32: v_sub_co_ci_u32_e32 v5, vcc_lo, v1, v2, vcc_lo ; encoding: [0x01,0x05,0x0a,0x52]
# W64: v_sub_co_ci_u32_e32 v5, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0x0a,0x52]
0x01,0x05,0x0a,0x52

# W32: v_sub_co_ci_u32_e32 v5, vcc_lo, v1, v255, vcc_lo ; encoding: [0x01,0xff,0x0b,0x52]
# W64: v_sub_co_ci_u32_e32 v5, vcc, v1, v255, vcc ; encoding: [0x01,0xff,0x0b,0x52]
0x01,0xff,0x0b,0x52

# W32: v_sub_co_ci_u32_e32 v5, vcc_lo, v255, v2, vcc_lo ; encoding: [0xff,0x05,0x0a,0x52]
# W64: v_sub_co_ci_u32_e32 v5, vcc, v255, v2, vcc ; encoding: [0xff,0x05,0x0a,0x52]
0xff,0x05,0x0a,0x52

# GFX10: v_sub_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x67]
0x01,0x05,0xfe,0x67

# GFX10: v_sub_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x66]
0xc1,0x04,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x66]
0xf7,0x04,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x66]
0x80,0x04,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x66]
0xf0,0x04,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x66,0x56,0x34,0x00,0x00]
0xff,0x04,0x0a,0x66,0x56,0x34,0x00,0x00

# GFX10: v_sub_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x66,0x0b,0xfe,0x00,0x00]
0xff,0x04,0x0a,0x66,0x0b,0xfe,0x00,0x00

# GFX10: v_sub_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x66]
0x7f,0x04,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x66]
0x7e,0x04,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x66]
0x7c,0x04,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x66]
0x01,0x04,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x66]
0x65,0x04,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x66]
0x01,0x05,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x66]
0x01,0xff,0x0b,0x66

# GFX10: v_sub_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x66]
0xff,0x05,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x66]
0x6b,0x04,0x0a,0x66

# GFX10: v_sub_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x66]
0x6a,0x04,0x0a,0x66

# GFX10: v_sub_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x09]
0x01,0x05,0xfe,0x09

# GFX10: v_sub_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x08]
0xc1,0x04,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x08]
0xf7,0x04,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x08]
0x80,0x04,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x08]
0xf0,0x04,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x08,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x08,0x73,0x72,0x71,0x3f

# GFX10: v_sub_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x08,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x08,0x56,0x34,0x12,0xaf

# GFX10: v_sub_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x08]
0x7f,0x04,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x08]
0x7e,0x04,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x08]
0x7c,0x04,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x08]
0x01,0x04,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x08]
0x65,0x04,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x08]
0x01,0x05,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x08]
0x01,0xff,0x0b,0x08

# GFX10: v_sub_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x08]
0xff,0x05,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x08]
0x6b,0x04,0x0a,0x08

# GFX10: v_sub_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x08]
0x6a,0x04,0x0a,0x08

# GFX10: v_sub_nc_u32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x4d]
0x01,0x05,0xfe,0x4d

# GFX10: v_sub_nc_u32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x4c]
0xc1,0x04,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x4c]
0xf7,0x04,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x4c]
0x80,0x04,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x4c]
0xf0,0x04,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x4c,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x4c,0x73,0x72,0x71,0x3f

# GFX10: v_sub_nc_u32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x4c,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x4c,0x56,0x34,0x12,0xaf

# GFX10: v_sub_nc_u32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x4c]
0x7f,0x04,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x4c]
0x7e,0x04,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x4c]
0x7c,0x04,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x4c]
0x01,0x04,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x4c]
0x65,0x04,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x4c]
0x01,0x05,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x4c]
0x01,0xff,0x0b,0x4c

# GFX10: v_sub_nc_u32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x4c]
0xff,0x05,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x4c]
0x6b,0x04,0x0a,0x4c

# GFX10: v_sub_nc_u32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x4c]
0x6a,0x04,0x0a,0x4c

# W32: v_subrev_co_ci_u32_e32 v255, vcc_lo, v1, v2, vcc_lo ; encoding: [0x01,0x05,0xfe,0x55]
# W64: v_subrev_co_ci_u32_e32 v255, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0xfe,0x55]
0x01,0x05,0xfe,0x55

# W32: v_subrev_co_ci_u32_e32 v5, vcc_lo, -1, v2, vcc_lo ; encoding: [0xc1,0x04,0x0a,0x54]
# W64: v_subrev_co_ci_u32_e32 v5, vcc, -1, v2, vcc ; encoding: [0xc1,0x04,0x0a,0x54]
0xc1,0x04,0x0a,0x54

# W32: v_subrev_co_ci_u32_e32 v5, vcc_lo, -4.0, v2, vcc_lo ; encoding: [0xf7,0x04,0x0a,0x54]
# W64: v_subrev_co_ci_u32_e32 v5, vcc, -4.0, v2, vcc ; encoding: [0xf7,0x04,0x0a,0x54]
0xf7,0x04,0x0a,0x54

# W32: v_subrev_co_ci_u32_e32 v5, vcc_lo, 0, v2, vcc_lo ; encoding: [0x80,0x04,0x0a,0x54]
# W64: v_subrev_co_ci_u32_e32 v5, vcc, 0, v2, vcc ; encoding: [0x80,0x04,0x0a,0x54]
0x80,0x04,0x0a,0x54

# W32: v_subrev_co_ci_u32_e32 v5, vcc_lo, 0.5, v2, vcc_lo ; encoding: [0xf0,0x04,0x0a,0x54]
# W64: v_subrev_co_ci_u32_e32 v5, vcc, 0.5, v2, vcc ; encoding: [0xf0,0x04,0x0a,0x54]
0xf0,0x04,0x0a,0x54

# W32: v_subrev_co_ci_u32_e32 v5, vcc_lo, v1, v2, vcc_lo ; encoding: [0x01,0x05,0x0a,0x54]
# W64: v_subrev_co_ci_u32_e32 v5, vcc, v1, v2, vcc ; encoding: [0x01,0x05,0x0a,0x54]
0x01,0x05,0x0a,0x54

# W32: v_subrev_co_ci_u32_e32 v5, vcc_lo, v1, v255, vcc_lo ; encoding: [0x01,0xff,0x0b,0x54]
# W64: v_subrev_co_ci_u32_e32 v5, vcc, v1, v255, vcc ; encoding: [0x01,0xff,0x0b,0x54]
0x01,0xff,0x0b,0x54

# W32: v_subrev_co_ci_u32_e32 v5, vcc_lo, v255, v2, vcc_lo ; encoding: [0xff,0x05,0x0a,0x54]
# W64: v_subrev_co_ci_u32_e32 v5, vcc, v255, v2, vcc ; encoding: [0xff,0x05,0x0a,0x54]
0xff,0x05,0x0a,0x54

# GFX10: v_subrev_f16_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x69]
0x01,0x05,0xfe,0x69

# GFX10: v_subrev_f16_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x68]
0xc1,0x04,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x68]
0xf7,0x04,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x68]
0x80,0x04,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x68]
0xf0,0x04,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, 0x3456, v2 ; encoding: [0xff,0x04,0x0a,0x68,0x56,0x34,0x00,0x00]
0xff,0x04,0x0a,0x68,0x56,0x34,0x00,0x00

# GFX10: v_subrev_f16_e32 v5, 0xfe0b, v2 ; encoding: [0xff,0x04,0x0a,0x68,0x0b,0xfe,0x00,0x00]
0xff,0x04,0x0a,0x68,0x0b,0xfe,0x00,0x00

# GFX10: v_subrev_f16_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x68]
0x7f,0x04,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x68]
0x7e,0x04,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x68]
0x7c,0x04,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x68]
0x01,0x04,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x68]
0x65,0x04,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x68]
0x01,0x05,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x68]
0x01,0xff,0x0b,0x68

# GFX10: v_subrev_f16_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x68]
0xff,0x05,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x68]
0x6b,0x04,0x0a,0x68

# GFX10: v_subrev_f16_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x68]
0x6a,0x04,0x0a,0x68

# GFX10: v_subrev_f32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x0b]
0x01,0x05,0xfe,0x0b

# GFX10: v_subrev_f32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x0a]
0xc1,0x04,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x0a]
0xf7,0x04,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x0a]
0x80,0x04,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x0a]
0xf0,0x04,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x0a,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x0a,0x73,0x72,0x71,0x3f

# GFX10: v_subrev_f32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x0a,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x0a,0x56,0x34,0x12,0xaf

# GFX10: v_subrev_f32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x0a]
0x7f,0x04,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x0a]
0x7e,0x04,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x0a]
0x7c,0x04,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x0a]
0x01,0x04,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x0a]
0x65,0x04,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x0a]
0x01,0x05,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x0a]
0x01,0xff,0x0b,0x0a

# GFX10: v_subrev_f32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x0a]
0xff,0x05,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x0a]
0x6b,0x04,0x0a,0x0a

# GFX10: v_subrev_f32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x0a]
0x6a,0x04,0x0a,0x0a

# GFX10: v_subrev_nc_u32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x4f]
0x01,0x05,0xfe,0x4f

# GFX10: v_subrev_nc_u32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x4e]
0xc1,0x04,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x4e]
0xf7,0x04,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x4e]
0x80,0x04,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x4e]
0xf0,0x04,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x4e,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x4e,0x73,0x72,0x71,0x3f

# GFX10: v_subrev_nc_u32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x4e,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x4e,0x56,0x34,0x12,0xaf

# GFX10: v_subrev_nc_u32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x4e]
0x7f,0x04,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x4e]
0x7e,0x04,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x4e]
0x7c,0x04,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x4e]
0x01,0x04,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, s101, v2 ; encoding: [0x65,0x04,0x0a,0x4e]
0x65,0x04,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x4e]
0x01,0x05,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x4e]
0x01,0xff,0x0b,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x4e]
0xff,0x05,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x4e]
0x6b,0x04,0x0a,0x4e

# GFX10: v_subrev_nc_u32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x4e]
0x6a,0x04,0x0a,0x4e

# GFX10: v_xnor_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x3d]
0x01,0x05,0xfe,0x3d

# GFX10: v_xnor_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x3c]
0xc1,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x3c]
0xf7,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x3c]
0x80,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x3c]
0xf0,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x3c,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x3c,0x73,0x72,0x71,0x3f

# GFX10: v_xnor_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x3c,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x3c,0x56,0x34,0x12,0xaf

# GFX10: v_xnor_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x3c]
0x7f,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x3c]
0x7e,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x3c]
0x7c,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x3c]
0x01,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x3c]
0x67,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x3c]
0x77,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x3c]
0x01,0x05,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x3c]
0x01,0xff,0x0b,0x3c

# GFX10: v_xnor_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x3c]
0xff,0x05,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x3c]
0x6b,0x04,0x0a,0x3c

# GFX10: v_xnor_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x3c]
0x6a,0x04,0x0a,0x3c

# GFX10: v_xor_b32_e32 v255, v1, v2 ; encoding: [0x01,0x05,0xfe,0x3b]
0x01,0x05,0xfe,0x3b

# GFX10: v_xor_b32_e32 v5, -1, v2 ; encoding: [0xc1,0x04,0x0a,0x3a]
0xc1,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, -4.0, v2 ; encoding: [0xf7,0x04,0x0a,0x3a]
0xf7,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, 0, v2 ; encoding: [0x80,0x04,0x0a,0x3a]
0x80,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, 0.5, v2 ; encoding: [0xf0,0x04,0x0a,0x3a]
0xf0,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, 0x3f717273, v2 ; encoding: [0xff,0x04,0x0a,0x3a,0x73,0x72,0x71,0x3f]
0xff,0x04,0x0a,0x3a,0x73,0x72,0x71,0x3f

# GFX10: v_xor_b32_e32 v5, 0xaf123456, v2 ; encoding: [0xff,0x04,0x0a,0x3a,0x56,0x34,0x12,0xaf]
0xff,0x04,0x0a,0x3a,0x56,0x34,0x12,0xaf

# GFX10: v_xor_b32_e32 v5, exec_hi, v2 ; encoding: [0x7f,0x04,0x0a,0x3a]
0x7f,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, exec_lo, v2 ; encoding: [0x7e,0x04,0x0a,0x3a]
0x7e,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, m0, v2 ; encoding: [0x7c,0x04,0x0a,0x3a]
0x7c,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, s1, v2 ; encoding: [0x01,0x04,0x0a,0x3a]
0x01,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, s103, v2 ; encoding: [0x67,0x04,0x0a,0x3a]
0x67,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, ttmp11, v2 ; encoding: [0x77,0x04,0x0a,0x3a]
0x77,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, v1, v2 ; encoding: [0x01,0x05,0x0a,0x3a]
0x01,0x05,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, v1, v255 ; encoding: [0x01,0xff,0x0b,0x3a]
0x01,0xff,0x0b,0x3a

# GFX10: v_xor_b32_e32 v5, v255, v2 ; encoding: [0xff,0x05,0x0a,0x3a]
0xff,0x05,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, vcc_hi, v2 ; encoding: [0x6b,0x04,0x0a,0x3a]
0x6b,0x04,0x0a,0x3a

# GFX10: v_xor_b32_e32 v5, vcc_lo, v2 ; encoding: [0x6a,0x04,0x0a,0x3a]
0x6a,0x04,0x0a,0x3a