; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+egpr | FileCheck %s --check-prefix=APX
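
; Check expansion of a full 1024-bit (i1024) multiply. With -mattr=+egpr the
; scalar mul/add-with-carry sequence below uses the APX extended GPRs
; (%r16-%r31) in addition to the legacy registers.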
define void @test_1024(ptr %a, ptr %b, ptr %out) nounwind {
; APX-LABEL: test_1024:
; APX: # %bb.0:
; APX-NEXT: pushq %rbp
; APX-NEXT: pushq %r15
; APX-NEXT: pushq %r14
; APX-NEXT: pushq %r13
; APX-NEXT: pushq %r12
; APX-NEXT: pushq %rbx
; APX-NEXT: subq $104, %rsp
; APX-NEXT: movq %rdx, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movq %rdi, %r24
; APX-NEXT: movq (%rdi), %r13
; APX-NEXT: movq 8(%rdi), %r18
; APX-NEXT: movq 24(%rdi), %r29
; APX-NEXT: movq 16(%rdi), %r17
; APX-NEXT: movq 40(%rdi), %rdi
; APX-NEXT: movq 32(%r24), %r10
; APX-NEXT: movq 56(%r24), %r15
; APX-NEXT: movq 48(%r24), %r12
; APX-NEXT: movq %rsi, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movq 24(%rsi), %r23
; APX-NEXT: movq 16(%rsi), %r11
; APX-NEXT: movq (%rsi), %r27
; APX-NEXT: movq 8(%rsi), %r14
; APX-NEXT: movq %r12, %rax
; APX-NEXT: mulq %r27
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r19
; APX-NEXT: movq %r15, %rax
; APX-NEXT: mulq %r27
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq %r12, %rax
; APX-NEXT: mulq %r14
; APX-NEXT: movq %rdx, %r20
; APX-NEXT: movq %rax, %r8
; APX-NEXT: addq %r16, %r8
; APX-NEXT: adcq %r9, %r20
; APX-NEXT: setb %al
; APX-NEXT: movzbl %al, %ecx
; APX-NEXT: movq %r15, %rax
; APX-NEXT: mulq %r14
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r20, %r16
; APX-NEXT: adcq %rcx, %r9
; APX-NEXT: movq %r10, %rax
; APX-NEXT: mulq %r27
; APX-NEXT: movq %rdx, %r20
; APX-NEXT: movq %rax, %r25
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: mulq %r27
; APX-NEXT: movq %rdx, %r21
; APX-NEXT: movq %rax, %r22
; APX-NEXT: addq %r20, %r22
; APX-NEXT: adcq $0, %r21
; APX-NEXT: movq %r10, %rax
; APX-NEXT: mulq %r14
; APX-NEXT: movq %rdx, %r20
; APX-NEXT: movq %rax, %r28
; APX-NEXT: addq %r22, %r28
; APX-NEXT: adcq %r21, %r20
; APX-NEXT: setb %al
; APX-NEXT: movzbl %al, %ecx
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: mulq %r14
; APX-NEXT: movq %rdx, %r21
; APX-NEXT: movq %rax, %r22
; APX-NEXT: addq %r20, %r22
; APX-NEXT: adcq %rcx, %r21
; APX-NEXT: addq %r19, %r22
; APX-NEXT: adcq %r8, %r21
; APX-NEXT: adcq $0, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq %r10, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movq %r10, %rax
; APX-NEXT: mulq %r11
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r30
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: movq %rdi, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: mulq %r11
; APX-NEXT: movq %rdx, %r19
; APX-NEXT: movq %rax, %r20
; APX-NEXT: addq %r8, %r20
; APX-NEXT: adcq $0, %r19
; APX-NEXT: movq %r10, %rax
; APX-NEXT: mulq %r23
; APX-NEXT: movq %rdx, %rbx
; APX-NEXT: movq %rax, %r31
; APX-NEXT: addq %r20, %r31
; APX-NEXT: adcq %r19, %rbx
; APX-NEXT: setb %al
; APX-NEXT: movzbl %al, %ecx
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: mulq %r23
; APX-NEXT: movq %rdx, %r26
; APX-NEXT: movq %rax, %r8
; APX-NEXT: addq %rbx, %r8
; APX-NEXT: adcq %rcx, %r26
; APX-NEXT: addq %r22, %r30
; APX-NEXT: adcq %r21, %r31
; APX-NEXT: adcq $0, %r8
; APX-NEXT: adcq $0, %r26
; APX-NEXT: addq %r16, %r8
; APX-NEXT: adcq %r9, %r26
; APX-NEXT: setb %al
; APX-NEXT: movzbl %al, %ecx
; APX-NEXT: movq %r12, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movq %r12, %rax
; APX-NEXT: mulq %r11
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %rsi
; APX-NEXT: movq %r15, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movq %r15, %rax
; APX-NEXT: mulq %r11
; APX-NEXT: movq %rdx, %r16
; APX-NEXT: movq %rax, %r21
; APX-NEXT: addq %r9, %r21
; APX-NEXT: adcq $0, %r16
; APX-NEXT: movq %r12, %rax
; APX-NEXT: mulq %r23
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %rdi
; APX-NEXT: addq %r21, %rdi
; APX-NEXT: adcq %r16, %r9
; APX-NEXT: setb %al
; APX-NEXT: movzbl %al, %r10d
; APX-NEXT: movq %r15, %rax
; APX-NEXT: mulq %r23
; APX-NEXT: movq %rdx, %r21
; APX-NEXT: movq %rax, %r22
; APX-NEXT: addq %r9, %r22
; APX-NEXT: adcq %r10, %r21
; APX-NEXT: addq %r8, %rsi
; APX-NEXT: movq %rsi, %r19
; APX-NEXT: adcq %r26, %rdi
; APX-NEXT: adcq %rcx, %r22
; APX-NEXT: adcq $0, %r21
; APX-NEXT: movq %r17, %rax
; APX-NEXT: mulq %r27
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %rbx
; APX-NEXT: movq %r29, %rax
; APX-NEXT: mulq %r27
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq %r17, %rax
; APX-NEXT: mulq %r14
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r26
; APX-NEXT: addq %r16, %r26
; APX-NEXT: adcq %r9, %r8
; APX-NEXT: setb %al
; APX-NEXT: movzbl %al, %ecx
; APX-NEXT: movq %r29, %rax
; APX-NEXT: mulq %r14
; APX-NEXT: movq %r14, %rsi
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: adcq %rcx, %r9
; APX-NEXT: movq %r13, %rax
; APX-NEXT: mulq %r27
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movq %r18, %rax
; APX-NEXT: mulq %r27
; APX-NEXT: movq %rdx, %r14
; APX-NEXT: movq %rax, %r15
; APX-NEXT: addq %r8, %r15
; APX-NEXT: adcq $0, %r14
; APX-NEXT: movq %r13, %rax
; APX-NEXT: movq %rsi, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: mulq %rsi
; APX-NEXT: movq %rdx, %r12
; APX-NEXT: addq %r15, %rax
; APX-NEXT: movq %rax, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq %r14, %r12
; APX-NEXT: setb %cl
; APX-NEXT: movq %r18, %rax
; APX-NEXT: mulq %rsi
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r15
; APX-NEXT: addq %r12, %r15
; APX-NEXT: movzbl %cl, %eax
; APX-NEXT: adcq %rax, %r8
; APX-NEXT: addq %rbx, %r15
; APX-NEXT: adcq %r26, %r8
; APX-NEXT: adcq $0, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq %r13, %rax
; APX-NEXT: mulq %r11
; APX-NEXT: movq %rdx, %r26
; APX-NEXT: movq %rax, %rsi
; APX-NEXT: movq %r18, %rax
; APX-NEXT: mulq %r11
; APX-NEXT: movq %rdx, %rbx
; APX-NEXT: movq %rax, %r14
; APX-NEXT: addq %r26, %r14
; APX-NEXT: adcq $0, %rbx
; APX-NEXT: movq %r13, %rax
; APX-NEXT: mulq %r23
; APX-NEXT: movq %rdx, %r12
; APX-NEXT: addq %r14, %rax
; APX-NEXT: movq %rax, %r10
; APX-NEXT: adcq %rbx, %r12
; APX-NEXT: setb %cl
; APX-NEXT: movq %r18, %rax
; APX-NEXT: mulq %r23
; APX-NEXT: movq %rdx, %r14
; APX-NEXT: movq %rax, %r26
; APX-NEXT: addq %r12, %r26
; APX-NEXT: movzbl %cl, %eax
; APX-NEXT: adcq %rax, %r14
; APX-NEXT: addq %r15, %rsi
; APX-NEXT: movq %rsi, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq %r8, %r10
; APX-NEXT: movq %r10, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq $0, %r26
; APX-NEXT: adcq $0, %r14
; APX-NEXT: addq %r16, %r26
; APX-NEXT: adcq %r9, %r14
; APX-NEXT: setb %cl
; APX-NEXT: movq %r17, %rax
; APX-NEXT: movq %r11, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: mulq %r11
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %rbx
; APX-NEXT: movq %r29, %rax
; APX-NEXT: mulq %r11
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq %r17, %rax
; APX-NEXT: mulq %r23
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r15
; APX-NEXT: addq %r16, %r15
; APX-NEXT: adcq %r9, %r8
; APX-NEXT: setb %r9b
; APX-NEXT: movq %r29, %rax
; APX-NEXT: mulq %r23
; APX-NEXT: movq %rdx, %r12
; APX-NEXT: movq %rax, %rbp
; APX-NEXT: addq %r8, %rbp
; APX-NEXT: movzbl %r9b, %eax
; APX-NEXT: adcq %rax, %r12
; APX-NEXT: addq %r26, %rbx
; APX-NEXT: adcq %r14, %r15
; APX-NEXT: movzbl %cl, %eax
; APX-NEXT: adcq %rax, %rbp
; APX-NEXT: adcq $0, %r12
; APX-NEXT: addq %r25, %rbx
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq 32(%rsi), %r25
; APX-NEXT: adcq %r28, %r15
; APX-NEXT: adcq %r30, %rbp
; APX-NEXT: adcq %r31, %r12
; APX-NEXT: adcq $0, %r19
; APX-NEXT: movq %r19, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq $0, %rdi
; APX-NEXT: adcq $0, %r22
; APX-NEXT: adcq $0, %r21
; APX-NEXT: movq %r17, %rax
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r28
; APX-NEXT: movq %r29, %rax
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq 40(%rsi), %rcx
; APX-NEXT: movq %r17, %rax
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r26
; APX-NEXT: addq %r16, %r26
; APX-NEXT: adcq %r9, %r8
; APX-NEXT: setb %r10b
; APX-NEXT: movq %r29, %rax
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: movzbl %r10b, %eax
; APX-NEXT: adcq %rax, %r9
; APX-NEXT: movq %r13, %rax
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r19
; APX-NEXT: movq %r18, %rax
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rdx, %r30
; APX-NEXT: movq %rax, %r31
; APX-NEXT: addq %r8, %r31
; APX-NEXT: adcq $0, %r30
; APX-NEXT: movq %r13, %rax
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r20
; APX-NEXT: addq %r31, %r20
; APX-NEXT: adcq %r30, %r8
; APX-NEXT: setb %r10b
; APX-NEXT: movq %r18, %rax
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r30
; APX-NEXT: movq %rax, %r31
; APX-NEXT: addq %r8, %r31
; APX-NEXT: movzbl %r10b, %eax
; APX-NEXT: adcq %rax, %r30
; APX-NEXT: addq %r28, %r31
; APX-NEXT: adcq %r26, %r30
; APX-NEXT: adcq $0, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq 48(%rsi), %r28
; APX-NEXT: movq %r13, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movq %r13, %rax
; APX-NEXT: mulq %r28
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r11
; APX-NEXT: movq %r18, %rax
; APX-NEXT: movq %r18, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: mulq %r28
; APX-NEXT: movq %rdx, %r26
; APX-NEXT: movq %rax, %r14
; APX-NEXT: addq %r8, %r14
; APX-NEXT: adcq $0, %r26
; APX-NEXT: movq 56(%rsi), %r10
; APX-NEXT: movq %r13, %rax
; APX-NEXT: mulq %r10
; APX-NEXT: movq %rdx, %r13
; APX-NEXT: addq %r14, %rax
; APX-NEXT: movq %rax, %r14
; APX-NEXT: adcq %r26, %r13
; APX-NEXT: setb %sil
; APX-NEXT: movq %r18, %rax
; APX-NEXT: mulq %r10
; APX-NEXT: movq %rdx, %r26
; APX-NEXT: movq %rax, %r8
; APX-NEXT: addq %r13, %r8
; APX-NEXT: movzbl %sil, %eax
; APX-NEXT: adcq %rax, %r26
; APX-NEXT: addq %r31, %r11
; APX-NEXT: adcq %r30, %r14
; APX-NEXT: adcq $0, %r8
; APX-NEXT: adcq $0, %r26
; APX-NEXT: addq %r16, %r8
; APX-NEXT: adcq %r9, %r26
; APX-NEXT: setb %r18b
; APX-NEXT: movq %r17, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movq %r17, %rax
; APX-NEXT: mulq %r28
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r30
; APX-NEXT: movq %r29, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movq %r29, %rax
; APX-NEXT: mulq %r28
; APX-NEXT: movq %rdx, %r16
; APX-NEXT: movq %rax, %r31
; APX-NEXT: addq %r9, %r31
; APX-NEXT: adcq $0, %r16
; APX-NEXT: movq %r17, %rax
; APX-NEXT: mulq %r10
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r17
; APX-NEXT: addq %r31, %r17
; APX-NEXT: adcq %r16, %r9
; APX-NEXT: setb %r16b
; APX-NEXT: movq %r29, %rax
; APX-NEXT: mulq %r10
; APX-NEXT: movq %rdx, %r13
; APX-NEXT: movq %rax, %r31
; APX-NEXT: addq %r9, %r31
; APX-NEXT: movzbl %r16b, %eax
; APX-NEXT: adcq %rax, %r13
; APX-NEXT: addq %r8, %r30
; APX-NEXT: adcq %r26, %r17
; APX-NEXT: movzbl %r18b, %eax
; APX-NEXT: adcq %rax, %r31
; APX-NEXT: adcq $0, %r13
; APX-NEXT: addq %rbx, %r19
; APX-NEXT: movq %r19, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq %r15, %r20
; APX-NEXT: movq %r20, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq %rbp, %r11
; APX-NEXT: movq %r11, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq %r12, %r14
; APX-NEXT: movq %r14, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq $0, %r30
; APX-NEXT: adcq $0, %r17
; APX-NEXT: adcq $0, %r31
; APX-NEXT: adcq $0, %r13
; APX-NEXT: addq {{[-0-9]+}}(%r{{[sb]}}p), %r30 # 8-byte Folded Reload
; APX-NEXT: adcq %rdi, %r17
; APX-NEXT: adcq %r22, %r31
; APX-NEXT: adcq %r21, %r13
; APX-NEXT: setb %r15b
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq %rsi, %rax
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r19
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %r21 # 8-byte Reload
; APX-NEXT: movq %r21, %rax
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq %rsi, %rax
; APX-NEXT: movq %rsi, %r29
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r20
; APX-NEXT: addq %r16, %r20
; APX-NEXT: adcq %r9, %r8
; APX-NEXT: setb %r18b
; APX-NEXT: movq %r21, %rax
; APX-NEXT: movq %r21, %r14
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: movzbl %r18b, %eax
; APX-NEXT: adcq %rax, %r9
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rbx # 8-byte Reload
; APX-NEXT: movq %rbx, %rax
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %rdi
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq %rsi, %rax
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rdx, %r21
; APX-NEXT: movq %rax, %r22
; APX-NEXT: addq %r8, %r22
; APX-NEXT: adcq $0, %r21
; APX-NEXT: movq %rbx, %rax
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: addq %r22, %rax
; APX-NEXT: movq %rax, %r11
; APX-NEXT: adcq %r21, %r8
; APX-NEXT: setb %r18b
; APX-NEXT: movq %rsi, %rax
; APX-NEXT: movq %rsi, %r21
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r22
; APX-NEXT: movq %rax, %r26
; APX-NEXT: addq %r8, %r26
; APX-NEXT: movzbl %r18b, %eax
; APX-NEXT: adcq %rax, %r22
; APX-NEXT: addq %r19, %r26
; APX-NEXT: adcq %r20, %r22
; APX-NEXT: adcq $0, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq %rbx, %rax
; APX-NEXT: mulq %r28
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %rsi
; APX-NEXT: movq %r21, %rax
; APX-NEXT: mulq %r28
; APX-NEXT: movq %rdx, %r19
; APX-NEXT: movq %rax, %r20
; APX-NEXT: addq %r8, %r20
; APX-NEXT: adcq $0, %r19
; APX-NEXT: movq %rbx, %rax
; APX-NEXT: mulq %r10
; APX-NEXT: movq %rdx, %rbx
; APX-NEXT: addq %r20, %rax
; APX-NEXT: movq %rax, %r20
; APX-NEXT: adcq %r19, %rbx
; APX-NEXT: setb %r18b
; APX-NEXT: movq %r21, %rax
; APX-NEXT: mulq %r10
; APX-NEXT: movq %rdx, %r21
; APX-NEXT: movq %rax, %r8
; APX-NEXT: addq %rbx, %r8
; APX-NEXT: movzbl %r18b, %eax
; APX-NEXT: adcq %rax, %r21
; APX-NEXT: addq %r26, %rsi
; APX-NEXT: adcq %r22, %r20
; APX-NEXT: adcq $0, %r8
; APX-NEXT: adcq $0, %r21
; APX-NEXT: addq %r16, %r8
; APX-NEXT: adcq %r9, %r21
; APX-NEXT: setb %r18b
; APX-NEXT: movq %r29, %rax
; APX-NEXT: mulq %r28
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r22
; APX-NEXT: movq %r14, %rax
; APX-NEXT: mulq %r28
; APX-NEXT: movq %rdx, %r16
; APX-NEXT: movq %rax, %r19
; APX-NEXT: addq %r9, %r19
; APX-NEXT: adcq $0, %r16
; APX-NEXT: movq %r29, %rax
; APX-NEXT: mulq %r10
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: addq %r19, %rax
; APX-NEXT: movq %rax, %r19
; APX-NEXT: adcq %r16, %r9
; APX-NEXT: setb %r16b
; APX-NEXT: movq %r14, %rax
; APX-NEXT: mulq %r10
; APX-NEXT: movq %rdx, %rbp
; APX-NEXT: movq %rax, %r12
; APX-NEXT: addq %r9, %r12
; APX-NEXT: movzbl %r16b, %eax
; APX-NEXT: adcq %rax, %rbp
; APX-NEXT: addq %r8, %r22
; APX-NEXT: adcq %r21, %r19
; APX-NEXT: movzbl %r18b, %eax
; APX-NEXT: adcq %rax, %r12
; APX-NEXT: adcq $0, %rbp
; APX-NEXT: addq %r30, %rdi
; APX-NEXT: movq %rdi, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq %r17, %r11
; APX-NEXT: movq %r11, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq %r31, %rsi
; APX-NEXT: movq %rsi, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq %r13, %r20
; APX-NEXT: movq %r20, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movzbl %r15b, %eax
; APX-NEXT: adcq %rax, %r22
; APX-NEXT: movq %r22, (%rsp) # 8-byte Spill
; APX-NEXT: adcq $0, %r19
; APX-NEXT: movq %r19, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq $0, %r12
; APX-NEXT: adcq $0, %rbp
; APX-NEXT: movq 64(%r24), %r21
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rdi # 8-byte Reload
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: mulq %r21
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r22
; APX-NEXT: movq %r23, %rax
; APX-NEXT: mulq %r21
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq 72(%r24), %r30
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: mulq %r30
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r26
; APX-NEXT: addq %r16, %r26
; APX-NEXT: adcq %r9, %r8
; APX-NEXT: setb %r18b
; APX-NEXT: movq %r23, %rax
; APX-NEXT: mulq %r30
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: movzbl %r18b, %eax
; APX-NEXT: adcq %rax, %r9
; APX-NEXT: movq %r27, %rax
; APX-NEXT: mulq %r21
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %r11 # 8-byte Reload
; APX-NEXT: movq %r11, %rax
; APX-NEXT: mulq %r21
; APX-NEXT: movq %rdx, %r31
; APX-NEXT: movq %rax, %rbx
; APX-NEXT: addq %r8, %rbx
; APX-NEXT: adcq $0, %r31
; APX-NEXT: movq %r27, %rax
; APX-NEXT: mulq %r30
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: addq %rbx, %rax
; APX-NEXT: movq %rax, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq %r31, %r8
; APX-NEXT: setb %r18b
; APX-NEXT: movq %r11, %rax
; APX-NEXT: mulq %r30
; APX-NEXT: movq %rdx, %r31
; APX-NEXT: movq %rax, %rbx
; APX-NEXT: addq %r8, %rbx
; APX-NEXT: movzbl %r18b, %eax
; APX-NEXT: adcq %rax, %r31
; APX-NEXT: addq %r22, %rbx
; APX-NEXT: adcq %r26, %r31
; APX-NEXT: adcq $0, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq 80(%r24), %r13
; APX-NEXT: movq %r27, %rax
; APX-NEXT: mulq %r13
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %rsi
; APX-NEXT: movq %r11, %rax
; APX-NEXT: mulq %r13
; APX-NEXT: movq %rdx, %r26
; APX-NEXT: movq %rax, %r14
; APX-NEXT: addq %r8, %r14
; APX-NEXT: adcq $0, %r26
; APX-NEXT: movq 88(%r24), %r18
; APX-NEXT: movq %r27, %rax
; APX-NEXT: mulq %r18
; APX-NEXT: movq %rdx, %r15
; APX-NEXT: movq %rax, %r22
; APX-NEXT: addq %r14, %r22
; APX-NEXT: adcq %r26, %r15
; APX-NEXT: setb %r14b
; APX-NEXT: movq %r11, %rax
; APX-NEXT: mulq %r18
; APX-NEXT: movq %rdx, %r26
; APX-NEXT: movq %rax, %r8
; APX-NEXT: addq %r15, %r8
; APX-NEXT: movzbl %r14b, %eax
; APX-NEXT: adcq %rax, %r26
; APX-NEXT: addq %rbx, %rsi
; APX-NEXT: movq %rsi, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; APX-NEXT: adcq %r31, %r22
; APX-NEXT: adcq $0, %r8
; APX-NEXT: adcq $0, %r26
; APX-NEXT: addq %r16, %r8
; APX-NEXT: adcq %r9, %r26
; APX-NEXT: setb %r31b
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: mulq %r13
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %rsi
; APX-NEXT: movq %r23, %rax
; APX-NEXT: mulq %r13
; APX-NEXT: movq %rdx, %r16
; APX-NEXT: movq %rax, %r14
; APX-NEXT: addq %r9, %r14
; APX-NEXT: adcq $0, %r16
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: mulq %r18
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %rbx
; APX-NEXT: addq %r14, %rbx
; APX-NEXT: adcq %r16, %r9
; APX-NEXT: setb %r16b
; APX-NEXT: movq %r23, %rax
; APX-NEXT: mulq %r18
; APX-NEXT: movq %rdx, %r14
; APX-NEXT: movq %rax, %r15
; APX-NEXT: addq %r9, %r15
; APX-NEXT: movzbl %r16b, %eax
; APX-NEXT: adcq %rax, %r14
; APX-NEXT: addq %r8, %rsi
; APX-NEXT: adcq %r26, %rbx
; APX-NEXT: movzbl %r31b, %eax
; APX-NEXT: adcq %rax, %r15
; APX-NEXT: adcq $0, %r14
; APX-NEXT: imulq %r25, %r18
; APX-NEXT: movq %r25, %rax
; APX-NEXT: mulq %r13
; APX-NEXT: movq %rax, %r8
; APX-NEXT: addq %r18, %rdx
; APX-NEXT: imulq %rcx, %r13
; APX-NEXT: addq %rdx, %r13
; APX-NEXT: movq %r28, %r9
; APX-NEXT: imulq %r30, %r9
; APX-NEXT: movq %r28, %rax
; APX-NEXT: mulq %r21
; APX-NEXT: movq %rax, %r26
; APX-NEXT: addq %r9, %rdx
; APX-NEXT: imulq %r21, %r10
; APX-NEXT: addq %rdx, %r10
; APX-NEXT: addq %r8, %r26
; APX-NEXT: adcq %r13, %r10
; APX-NEXT: movq %r21, %rax
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r9
; APX-NEXT: movq %r30, %rax
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rdx, %r25
; APX-NEXT: movq %rax, %r28
; APX-NEXT: addq %r8, %r28
; APX-NEXT: adcq $0, %r25
; APX-NEXT: movq %r21, %rax
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r28, %r16
; APX-NEXT: adcq %r25, %r8
; APX-NEXT: setb %r18b
; APX-NEXT: movq %r30, %rax
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r21
; APX-NEXT: movq %rax, %r28
; APX-NEXT: addq %r8, %r28
; APX-NEXT: movzbl %r18b, %eax
; APX-NEXT: adcq %rax, %r21
; APX-NEXT: addq %r26, %r28
; APX-NEXT: adcq %r10, %r21
; APX-NEXT: movq 112(%r24), %rcx
; APX-NEXT: movq %r27, %rax
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rax, %r8
; APX-NEXT: imulq %r11, %rcx
; APX-NEXT: addq %rdx, %rcx
; APX-NEXT: movq 120(%r24), %rax
; APX-NEXT: imulq %r27, %rax
; APX-NEXT: addq %rax, %rcx
; APX-NEXT: movq 96(%r24), %r25
; APX-NEXT: movq 104(%r24), %r26
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: imulq %r26, %rdi
; APX-NEXT: mulq %r25
; APX-NEXT: movq %rax, %r29
; APX-NEXT: addq %rdi, %rdx
; APX-NEXT: imulq %r25, %r23
; APX-NEXT: addq %rdx, %r23
; APX-NEXT: addq %r8, %r29
; APX-NEXT: adcq %rcx, %r23
; APX-NEXT: movq %r25, %rax
; APX-NEXT: mulq %r27
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r20
; APX-NEXT: movq %r26, %rax
; APX-NEXT: mulq %r27
; APX-NEXT: movq %rdx, %r27
; APX-NEXT: movq %rax, %r30
; APX-NEXT: addq %r8, %r30
; APX-NEXT: adcq $0, %r27
; APX-NEXT: movq %r25, %rax
; APX-NEXT: mulq %r11
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r25
; APX-NEXT: addq %r30, %r25
; APX-NEXT: adcq %r27, %r8
; APX-NEXT: setb %cl
; APX-NEXT: movq %r26, %rax
; APX-NEXT: mulq %r11
; APX-NEXT: movq %rdx, %r24
; APX-NEXT: movq %rax, %r27
; APX-NEXT: addq %r8, %r27
; APX-NEXT: movzbl %cl, %eax
; APX-NEXT: adcq %rax, %r24
; APX-NEXT: addq %r29, %r27
; APX-NEXT: adcq %r23, %r24
; APX-NEXT: addq %r9, %r20
; APX-NEXT: adcq %r16, %r25
; APX-NEXT: adcq %r28, %r27
; APX-NEXT: adcq %r21, %r24
; APX-NEXT: addq %rsi, %r20
; APX-NEXT: adcq %rbx, %r25
; APX-NEXT: adcq %r15, %r27
; APX-NEXT: adcq %r14, %r24
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %r11 # 8-byte Reload
; APX-NEXT: movq 80(%r11), %rbx
; APX-NEXT: movq %rbx, %rax
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %r19 # 8-byte Reload
; APX-NEXT: mulq %r19
; APX-NEXT: movq %rax, %r21
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq 88(%r11), %r28
; APX-NEXT: movq %r28, %rax
; APX-NEXT: mulq %r19
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq %rbx, %rax
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %r17 # 8-byte Reload
; APX-NEXT: mulq %r17
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r26
; APX-NEXT: addq %r16, %r26
; APX-NEXT: adcq %r9, %r8
; APX-NEXT: setb %cl
; APX-NEXT: movq %r28, %rax
; APX-NEXT: mulq %r17
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r16
; APX-NEXT: addq %r8, %r16
; APX-NEXT: movzbl %cl, %eax
; APX-NEXT: adcq %rax, %r9
; APX-NEXT: movq 64(%r11), %r15
; APX-NEXT: movq %r15, %rax
; APX-NEXT: mulq %r19
; APX-NEXT: movq %rax, %r23
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq 72(%r11), %r14
; APX-NEXT: movq %r14, %rax
; APX-NEXT: mulq %r19
; APX-NEXT: movq %rdx, %r30
; APX-NEXT: movq %rax, %r31
; APX-NEXT: addq %r8, %r31
; APX-NEXT: adcq $0, %r30
; APX-NEXT: movq %r15, %rax
; APX-NEXT: mulq %r17
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r29
; APX-NEXT: addq %r31, %r29
; APX-NEXT: adcq %r30, %r8
; APX-NEXT: setb %cl
; APX-NEXT: movq %r14, %rax
; APX-NEXT: mulq %r17
; APX-NEXT: movq %rdx, %r31
; APX-NEXT: movq %rax, %r13
; APX-NEXT: addq %r8, %r13
; APX-NEXT: movzbl %cl, %eax
; APX-NEXT: adcq %rax, %r31
; APX-NEXT: addq %r21, %r13
; APX-NEXT: adcq %r26, %r31
; APX-NEXT: adcq $0, %r16
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq %r15, %rax
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rdi # 8-byte Reload
; APX-NEXT: mulq %rdi
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r30
; APX-NEXT: movq %r14, %rax
; APX-NEXT: mulq %rdi
; APX-NEXT: movq %rdx, %r26
; APX-NEXT: movq %rax, %rcx
; APX-NEXT: addq %r8, %rcx
; APX-NEXT: adcq $0, %r26
; APX-NEXT: movq %r15, %rax
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %r18 # 8-byte Reload
; APX-NEXT: mulq %r18
; APX-NEXT: movq %rdx, %r10
; APX-NEXT: movq %rax, %r21
; APX-NEXT: addq %rcx, %r21
; APX-NEXT: adcq %r26, %r10
; APX-NEXT: setb %cl
; APX-NEXT: movq %r14, %rax
; APX-NEXT: mulq %r18
; APX-NEXT: movq %rdx, %r26
; APX-NEXT: movq %rax, %r8
; APX-NEXT: addq %r10, %r8
; APX-NEXT: movzbl %cl, %eax
; APX-NEXT: adcq %rax, %r26
; APX-NEXT: addq %r13, %r30
; APX-NEXT: adcq %r31, %r21
; APX-NEXT: adcq $0, %r8
; APX-NEXT: adcq $0, %r26
; APX-NEXT: addq %r16, %r8
; APX-NEXT: adcq %r9, %r26
; APX-NEXT: setb %sil
; APX-NEXT: movq %rbx, %rax
; APX-NEXT: mulq %rdi
; APX-NEXT: movq %rdx, %rcx
; APX-NEXT: movq %rax, %r31
; APX-NEXT: movq %r28, %rax
; APX-NEXT: mulq %rdi
; APX-NEXT: movq %rdx, %r9
; APX-NEXT: movq %rax, %r10
; APX-NEXT: addq %rcx, %r10
; APX-NEXT: adcq $0, %r9
; APX-NEXT: movq %rbx, %rax
; APX-NEXT: mulq %r18
; APX-NEXT: movq %rdx, %rcx
; APX-NEXT: movq %rax, %r13
; APX-NEXT: addq %r10, %r13
; APX-NEXT: adcq %r9, %rcx
; APX-NEXT: setb %r10b
; APX-NEXT: movq %r28, %rax
; APX-NEXT: mulq %r18
; APX-NEXT: movq %rdx, %r16
; APX-NEXT: movq %rax, %r9
; APX-NEXT: addq %rcx, %r9
; APX-NEXT: movzbl %r10b, %eax
; APX-NEXT: adcq %rax, %r16
; APX-NEXT: addq %r8, %r31
; APX-NEXT: adcq %r26, %r13
; APX-NEXT: movzbl %sil, %eax
; APX-NEXT: adcq %rax, %r9
; APX-NEXT: adcq $0, %r16
; APX-NEXT: movq 96(%r11), %rcx
; APX-NEXT: imulq %rcx, %r18
; APX-NEXT: movq %rcx, %rax
; APX-NEXT: mulq %rdi
; APX-NEXT: movq %rax, %r8
; APX-NEXT: addq %r18, %rdx
; APX-NEXT: movq 104(%r11), %r26
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: imulq %r26, %rax
; APX-NEXT: addq %rdx, %rax
; APX-NEXT: movq %rax, %r10
; APX-NEXT: movq 112(%r11), %rax
; APX-NEXT: movq %rax, %rsi
; APX-NEXT: imulq %r17, %rsi
; APX-NEXT: mulq %r19
; APX-NEXT: movq %rax, %rdi
; APX-NEXT: addq %rsi, %rdx
; APX-NEXT: movq 120(%r11), %r18
; APX-NEXT: imulq %r19, %r18
; APX-NEXT: addq %rdx, %r18
; APX-NEXT: addq %r8, %rdi
; APX-NEXT: adcq %r10, %r18
; APX-NEXT: movq %r19, %rax
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %rsi
; APX-NEXT: movq %r17, %rax
; APX-NEXT: mulq %rcx
; APX-NEXT: movq %rdx, %rcx
; APX-NEXT: movq %rax, %r10
; APX-NEXT: addq %r8, %r10
; APX-NEXT: adcq $0, %rcx
; APX-NEXT: movq %r19, %rax
; APX-NEXT: mulq %r26
; APX-NEXT: movq %rdx, %r8
; APX-NEXT: movq %rax, %r11
; APX-NEXT: addq %r10, %r11
; APX-NEXT: adcq %rcx, %r8
; APX-NEXT: setb %cl
; APX-NEXT: movq %r17, %rax
; APX-NEXT: mulq %r26
; APX-NEXT: movq %rdx, %r10
; APX-NEXT: movq %rax, %r17
; APX-NEXT: addq %r8, %r17
; APX-NEXT: movzbl %cl, %eax
; APX-NEXT: adcq %rax, %r10
; APX-NEXT: addq %rdi, %r17
; APX-NEXT: adcq %r18, %r10
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rdi # 8-byte Reload
; APX-NEXT: imulq %r15, %rdi
; APX-NEXT: movq %r15, %rax
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %r8 # 8-byte Reload
; APX-NEXT: mulq %r8
; APX-NEXT: movq %rax, %rcx
; APX-NEXT: addq %rdi, %rdx
; APX-NEXT: movq %r8, %rax
; APX-NEXT: imulq %r14, %rax
; APX-NEXT: addq %rdx, %rax
; APX-NEXT: movq %rax, %r18
; APX-NEXT: movq %rbx, %rdi
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %r19 # 8-byte Reload
; APX-NEXT: imulq %r19, %rdi
; APX-NEXT: movq %rbx, %rax
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %r8 # 8-byte Reload
; APX-NEXT: mulq %r8
; APX-NEXT: movq %rax, %r26
; APX-NEXT: addq %rdi, %rdx
; APX-NEXT: imulq %r8, %r28
; APX-NEXT: addq %rdx, %r28
; APX-NEXT: addq %rcx, %r26
; APX-NEXT: adcq %r18, %r28
; APX-NEXT: movq %r8, %rax
; APX-NEXT: movq %r8, %rdi
; APX-NEXT: mulq %r15
; APX-NEXT: movq %rdx, %rcx
; APX-NEXT: movq %rax, %r8
; APX-NEXT: movq %r19, %rax
; APX-NEXT: mulq %r15
; APX-NEXT: movq %rdx, %rbx
; APX-NEXT: movq %rax, %r15
; APX-NEXT: addq %rcx, %r15
; APX-NEXT: adcq $0, %rbx
; APX-NEXT: movq %rdi, %rax
; APX-NEXT: mulq %r14
; APX-NEXT: movq %rdx, %rcx
; APX-NEXT: movq %rax, %r18
; APX-NEXT: addq %r15, %r18
; APX-NEXT: adcq %rbx, %rcx
; APX-NEXT: setb %dil
; APX-NEXT: movq %r19, %rax
; APX-NEXT: mulq %r14
; APX-NEXT: addq %rcx, %rax
; APX-NEXT: movzbl %dil, %ecx
; APX-NEXT: adcq %rcx, %rdx
; APX-NEXT: addq %r26, %rax
; APX-NEXT: adcq %r28, %rdx
; APX-NEXT: addq %rsi, %r8
; APX-NEXT: adcq %r11, %r18
; APX-NEXT: adcq %r17, %rax
; APX-NEXT: adcq %r10, %rdx
; APX-NEXT: addq %r31, %r8
; APX-NEXT: adcq %r13, %r18
; APX-NEXT: adcq %r9, %rax
; APX-NEXT: adcq %r16, %rdx
; APX-NEXT: addq {{[-0-9]+}}(%r{{[sb]}}p), %r23 # 8-byte Folded Reload
; APX-NEXT: adcq {{[-0-9]+}}(%r{{[sb]}}p), %r29 # 8-byte Folded Reload
; APX-NEXT: adcq {{[-0-9]+}}(%r{{[sb]}}p), %r30 # 8-byte Folded Reload
; APX-NEXT: adcq %r22, %r21
; APX-NEXT: adcq %r20, %r8
; APX-NEXT: adcq %r25, %r18
; APX-NEXT: adcq %r27, %rax
; APX-NEXT: adcq %r24, %rdx
; APX-NEXT: addq {{[-0-9]+}}(%r{{[sb]}}p), %r23 # 8-byte Folded Reload
; APX-NEXT: adcq {{[-0-9]+}}(%r{{[sb]}}p), %r29 # 8-byte Folded Reload
; APX-NEXT: adcq {{[-0-9]+}}(%r{{[sb]}}p), %r30 # 8-byte Folded Reload
; APX-NEXT: adcq {{[-0-9]+}}(%r{{[sb]}}p), %r21 # 8-byte Folded Reload
; APX-NEXT: adcq (%rsp), %r8 # 8-byte Folded Reload
; APX-NEXT: adcq {{[-0-9]+}}(%r{{[sb]}}p), %r18 # 8-byte Folded Reload
; APX-NEXT: adcq %r12, %rax
; APX-NEXT: adcq %rbp, %rdx
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rcx # 8-byte Reload
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq %rsi, (%rcx)
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq %rsi, 8(%rcx)
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq %rsi, 16(%rcx)
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq %rsi, 24(%rcx)
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq %rsi, 32(%rcx)
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq %rsi, 40(%rcx)
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq %rsi, 48(%rcx)
; APX-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; APX-NEXT: movq %rsi, 56(%rcx)
; APX-NEXT: movq %r23, 64(%rcx)
; APX-NEXT: movq %r29, 72(%rcx)
; APX-NEXT: movq %r30, 80(%rcx)
; APX-NEXT: movq %r21, 88(%rcx)
; APX-NEXT: movq %r8, 96(%rcx)
; APX-NEXT: movq %r18, 104(%rcx)
; APX-NEXT: movq %rax, 112(%rcx)
; APX-NEXT: movq %rdx, 120(%rcx)
; APX-NEXT: addq $104, %rsp
; APX-NEXT: popq %rbx
; APX-NEXT: popq %r12
; APX-NEXT: popq %r13
; APX-NEXT: popq %r14
; APX-NEXT: popq %r15
; APX-NEXT: popq %rbp
; APX-NEXT: retq
  %av = load i1024, ptr %a
  %bv = load i1024, ptr %b
  %r = mul i1024 %av, %bv
  store i1024 %r, ptr %out
  ret void
}