; NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
; RUN: llc -mtriple=aarch64-unknown-unknown -global-isel -global-isel-abort=1 -verify-machineinstrs -stop-after=irtranslator %s -o - | FileCheck %s

; Plain memcpy of a constant 4 bytes: the i32 length is zero-extended to s64
; and the call lowers to G_MEMCPY with tail-call flag 0.
define void @copy(ptr %dst, ptr %src) {
  ; CHECK-LABEL: name: copy
  ; CHECK: bb.1.entry:
  ; CHECK: liveins: $x0, $x1
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
  ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
  ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
  ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
  ; CHECK: G_MEMCPY [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64), 0 :: (store (s8) into %ir.dst), (load (s8) from %ir.src)
  ; CHECK: RET_ReallyLR
entry:
  call void @llvm.memcpy.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 false)
  ret void
}

; llvm.memcpy.inline lowers to G_MEMCPY_INLINE (no tail-call flag operand).
define void @inline_copy(ptr %dst, ptr %src) {
  ; CHECK-LABEL: name: inline_copy
  ; CHECK: bb.1.entry:
  ; CHECK: liveins: $x0, $x1
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
  ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
  ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
  ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
  ; CHECK: G_MEMCPY_INLINE [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64) :: (store (s8) into %ir.dst), (load (s8) from %ir.src)
  ; CHECK: RET_ReallyLR
entry:
  call void @llvm.memcpy.inline.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 false)
  ret void
}

; Volatile memcpy (i1 true): the G_MEMCPY memory operands carry the
; volatile flag on both the store and the load.
define void @copy_volatile(ptr %dst, ptr %src) {
  ; CHECK-LABEL: name: copy_volatile
  ; CHECK: bb.1.entry:
  ; CHECK: liveins: $x0, $x1
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
  ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
  ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
  ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
  ; CHECK: G_MEMCPY [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64), 0 :: (volatile store (s8) into %ir.dst), (volatile load (s8) from %ir.src)
  ; CHECK: RET_ReallyLR
entry:
  call void @llvm.memcpy.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 true)
  ret void
}

; Volatile inline memcpy: G_MEMCPY_INLINE with volatile memory operands.
define void @inline_copy_volatile(ptr %dst, ptr %src) {
  ; CHECK-LABEL: name: inline_copy_volatile
  ; CHECK: bb.1.entry:
  ; CHECK: liveins: $x0, $x1
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
  ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
  ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
  ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
  ; CHECK: G_MEMCPY_INLINE [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64) :: (volatile store (s8) into %ir.dst), (volatile load (s8) from %ir.src)
  ; CHECK: RET_ReallyLR
entry:
  call void @llvm.memcpy.inline.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 true)
  ret void
}

; Tail-call memcpy: the G_MEMCPY tail-call flag operand becomes 1.
define void @tail_copy(ptr %dst, ptr %src) {
  ; CHECK-LABEL: name: tail_copy
  ; CHECK: bb.1.entry:
  ; CHECK: liveins: $x0, $x1
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
  ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
  ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
  ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
  ; CHECK: G_MEMCPY [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64), 1 :: (store (s8) into %ir.dst), (load (s8) from %ir.src)
  ; CHECK: RET_ReallyLR
entry:
  tail call void @llvm.memcpy.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 false)
  ret void
}

; Tail-call marker on the inline variant: G_MEMCPY_INLINE has no tail-call
; operand, so the output matches the non-tail inline_copy case.
define void @tail_inline_copy(ptr %dst, ptr %src) {
  ; CHECK-LABEL: name: tail_inline_copy
  ; CHECK: bb.1.entry:
  ; CHECK: liveins: $x0, $x1
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
  ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
  ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
  ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
  ; CHECK: G_MEMCPY_INLINE [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64) :: (store (s8) into %ir.dst), (load (s8) from %ir.src)
  ; CHECK: RET_ReallyLR
entry:
  tail call void @llvm.memcpy.inline.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 false)
  ret void
}

; Tail-call + volatile: G_MEMCPY with tail-call flag 1 and volatile
; memory operands.
define void @tail_copy_volatile(ptr %dst, ptr %src) {
  ; CHECK-LABEL: name: tail_copy_volatile
  ; CHECK: bb.1.entry:
  ; CHECK: liveins: $x0, $x1
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
  ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
  ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
  ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
  ; CHECK: G_MEMCPY [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64), 1 :: (volatile store (s8) into %ir.dst), (volatile load (s8) from %ir.src)
  ; CHECK: RET_ReallyLR
entry:
  tail call void @llvm.memcpy.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 true)
  ret void
}

; Tail-call + volatile inline variant: G_MEMCPY_INLINE with volatile
; memory operands (no tail-call flag operand).
define void @tail_inline_copy_volatile(ptr %dst, ptr %src) {
  ; CHECK-LABEL: name: tail_inline_copy_volatile
  ; CHECK: bb.1.entry:
  ; CHECK: liveins: $x0, $x1
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
  ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
  ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
  ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
  ; CHECK: G_MEMCPY_INLINE [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64) :: (volatile store (s8) into %ir.dst), (volatile load (s8) from %ir.src)
  ; CHECK: RET_ReallyLR
entry:
  tail call void @llvm.memcpy.inline.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 true)
  ret void
}

; Intrinsic declarations: (dst, src, len, isvolatile) for both the plain
; and the inline memcpy intrinsic with an i32 length.
declare void @llvm.memcpy.p0.p0.i32(ptr nocapture writeonly, ptr nocapture readonly, i32, i1) nounwind
declare void @llvm.memcpy.inline.p0.p0.i32(ptr nocapture writeonly, ptr nocapture readonly, i32, i1) nounwind