; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 3
; RUN: llc -verify-machineinstrs -mcpu=pwr7 -ppc-asm-full-reg-names \
; RUN:   -mtriple powerpc64-ibm-aix-xcoff -mattr=+aix-small-local-exec-tls < %s \
; RUN:   | FileCheck %s --check-prefix=SMALL-LOCAL-EXEC-SMALLCM64
; RUN: llc -verify-machineinstrs -mcpu=pwr7 -ppc-asm-full-reg-names \
; RUN:   -mtriple powerpc64-ibm-aix-xcoff --code-model=large \
; RUN:   -mattr=+aix-small-local-exec-tls < %s | FileCheck %s \
; RUN:   --check-prefix=SMALL-LOCAL-EXEC-LARGECM64

@ThreadLocalVarInit = thread_local(localexec) global double 1.000000e+00, align 8
@VarInit = local_unnamed_addr global double 8.700000e+01, align 8
@IThreadLocalVarInit = internal thread_local(localexec) global double 1.000000e+00, align 8
declare nonnull ptr @llvm.threadlocal.address.p0(ptr nonnull) #1
@f = thread_local(localexec) global [87 x double] zeroinitializer, align 8

define nonnull ptr @AddrTest1() local_unnamed_addr #0 {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: AddrTest1:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    la r3, f[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    addi r3, r3, 48
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: AddrTest1:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    la r3, f[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    addi r3, r3, 48
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @f)
  %arrayidx = getelementptr inbounds [87 x double], ptr %0, i64 0, i64 6
  ret ptr %arrayidx
}

define void @storeITLInit(double noundef %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: storeITLInit:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    stfd f1, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: storeITLInit:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    stfd f1, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  store double %x, ptr %0, align 8
  ret void
}

define void @storeTLInit(double noundef %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: storeTLInit:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    stfd f1, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: storeTLInit:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    stfd f1, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @ThreadLocalVarInit)
  store double %x, ptr %0, align 8
  ret void
}

define double @loadITLInit() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadITLInit:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lfd f1, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadITLInit:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lfd f1, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  %1 = load double, ptr %0, align 8
  ret double %1
}

define double @loadITLInit2() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadITLInit2:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    ld r3, L..C0(r2) # @VarInit
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lfd f0, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lfd f1, 0(r3)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    xsadddp f1, f0, f1
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadITLInit2:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    addis r3, L..C0@u(r2)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lfd f0, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    ld r3, L..C0@l(r3)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lfd f1, 0(r3)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    xsadddp f1, f0, f1
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  %1 = load double, ptr %0, align 8
  %2 = load double, ptr @VarInit, align 8
  %add = fadd double %1, %2
  ret double %add
}

define double @loadTLInit() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadTLInit:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lfd f1, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadTLInit:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lfd f1, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @ThreadLocalVarInit)
  %1 = load double, ptr %0, align 8
  ret double %1
}

define double @loadTLInit2() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadTLInit2:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    ld r3, L..C0(r2) # @VarInit
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lfd f0, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lfd f1, 0(r3)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    xsadddp f1, f0, f1
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadTLInit2:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    addis r3, L..C0@u(r2)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lfd f0, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    ld r3, L..C0@l(r3)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lfd f1, 0(r3)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    xsadddp f1, f0, f1
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @ThreadLocalVarInit)
  %1 = load double, ptr %0, align 8
  %2 = load double, ptr @VarInit, align 8
  %add = fadd double %1, %2
  ret double %add
}

define void @loadStore1(double noundef %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadStore1:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    vspltisw v2, 1
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lfd f1, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    xvcvsxwdp vs0, vs34
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    vspltisw v3, 8
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    xsadddp f0, f1, f0
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    xvcvsxwdp vs1, vs35
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    xsadddp f0, f0, f1
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    stfd f0, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadStore1:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    vspltisw v2, 1
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lfd f1, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    xvcvsxwdp vs0, vs34
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    vspltisw v3, 8
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    xsadddp f0, f1, f0
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    xvcvsxwdp vs1, vs35
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    xsadddp f0, f0, f1
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    stfd f0, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  %1 = load double, ptr %0, align 8
  %inc = fadd double %1, 1.000000e+00
  %add = fadd double %inc, 8.000000e+00
  store double %add, ptr %0, align 8
  ret void
}