; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 3
; RUN: llc -verify-machineinstrs -mcpu=pwr7 -ppc-asm-full-reg-names \
; RUN:   -mtriple powerpc64-ibm-aix-xcoff -mattr=+aix-small-local-exec-tls < %s \
; RUN:   | FileCheck %s --check-prefix=SMALL-LOCAL-EXEC-SMALLCM64
; RUN: llc -verify-machineinstrs -mcpu=pwr7 -ppc-asm-full-reg-names \
; RUN:   -mtriple powerpc64-ibm-aix-xcoff --code-model=large \
; RUN:   -mattr=+aix-small-local-exec-tls < %s | FileCheck %s \
; RUN:   --check-prefix=SMALL-LOCAL-EXEC-LARGECM64

; With +aix-small-local-exec-tls, local-exec TLS accesses are expected to be a
; single displacement off the thread pointer (r13), e.g. `Sym[TL]@le(r13)`,
; under both the default (small) and large code models.

@ThreadLocalVarInit = thread_local(localexec) global i32 1, align 4
@VarInit = local_unnamed_addr global i32 87, align 4
@IThreadLocalVarInit = internal thread_local(localexec) global i32 1, align 4

declare nonnull ptr @llvm.threadlocal.address.p0(ptr nonnull) #1

%struct.anon = type { i32 }

@ThreadLocalStruct = thread_local(localexec) global %struct.anon zeroinitializer, align 1
@a = thread_local(localexec) global [87 x i32] zeroinitializer, align 4

; Address of an element of a thread-local array: a single `la` off r13 for the
; base, plus an `addi` for the constant element offset (12 = 3 * sizeof(i32)).
define nonnull ptr @AddrTest1() local_unnamed_addr #0 {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: AddrTest1:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    la r3, a[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    addi r3, r3, 12
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: AddrTest1:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    la r3, a[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    addi r3, r3, 12
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 4 ptr @llvm.threadlocal.address.p0(ptr align 4 @a)
  %arrayidx = getelementptr inbounds [87 x i32], ptr %0, i64 0, i64 3
  ret ptr %arrayidx
}

; Underaligned (align 1) thread-local struct: the i32 load is not folded into
; the TLS displacement form; the address is materialized with `la` and then
; loaded through `lwa`.
define signext i32 @testUnaligned() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: testUnaligned:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    la r3, ThreadLocalStruct[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lwa r3, 0(r3)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: testUnaligned:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    la r3, ThreadLocalStruct[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lwa r3, 0(r3)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = call align 1 ptr @llvm.threadlocal.address.p0(ptr align 1 @ThreadLocalStruct)
  %x = getelementptr inbounds %struct.anon, ptr %0, i32 0, i32 0
  %1 = load i32, ptr %x, align 1
  ret i32 %1
}

; Store to an internal thread-local i32: folded into a single `stw` off r13.
define void @storeITLInit(i32 noundef signext %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: storeITLInit:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    stw r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: storeITLInit:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    stw r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 4 ptr @llvm.threadlocal.address.p0(ptr align 4 @IThreadLocalVarInit)
  store i32 %x, ptr %0, align 4
  ret void
}

; Store to an externally visible thread-local i32: same single-`stw` pattern.
define void @storeTLInit(i32 noundef signext %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: storeTLInit:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    stw r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: storeTLInit:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    stw r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 4 ptr @llvm.threadlocal.address.p0(ptr align 4 @ThreadLocalVarInit)
  store i32 %x, ptr %0, align 4
  ret void
}

; Sign-extending load of an internal thread-local i32: a single `lwa` off r13.
define signext i32 @loadITLInit() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadITLInit:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lwa r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadITLInit:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lwa r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 4 ptr @llvm.threadlocal.address.p0(ptr align 4 @IThreadLocalVarInit)
  %1 = load i32, ptr %0, align 4
  ret i32 %1
}

; Mix a TLS load with a TOC-based load of a normal global; the TLS access is
; still one `lwz` off r13 while @VarInit goes through the TOC (L..C0), which
; needs an extra `addis`/`ld` pair under the large code model.
define signext i32 @loadITLInit2() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadITLInit2:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    ld r4, L..C0(r2) # @VarInit
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lwz r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lwz r4, 0(r4)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    add r3, r4, r3
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    extsw r3, r3
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadITLInit2:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    addis r4, L..C0@u(r2)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lwz r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    ld r4, L..C0@l(r4)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lwz r4, 0(r4)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    add r3, r4, r3
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    extsw r3, r3
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 4 ptr @llvm.threadlocal.address.p0(ptr align 4 @IThreadLocalVarInit)
  %1 = load i32, ptr %0, align 4
  %2 = load i32, ptr @VarInit, align 4
  %add = add nsw i32 %2, %1
  ret i32 %add
}

; Same as loadITLInit but for the externally visible thread-local variable.
define signext i32 @loadTLInit() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadTLInit:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lwa r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadTLInit:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lwa r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 4 ptr @llvm.threadlocal.address.p0(ptr align 4 @ThreadLocalVarInit)
  %1 = load i32, ptr %0, align 4
  ret i32 %1
}

; Same as loadITLInit2 but for the externally visible thread-local variable.
define signext i32 @loadTLInit2() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadTLInit2:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    ld r4, L..C0(r2) # @VarInit
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lwz r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lwz r4, 0(r4)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    add r3, r4, r3
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    extsw r3, r3
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadTLInit2:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    addis r4, L..C0@u(r2)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lwz r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    ld r4, L..C0@l(r4)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lwz r4, 0(r4)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    add r3, r4, r3
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    extsw r3, r3
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 4 ptr @llvm.threadlocal.address.p0(ptr align 4 @ThreadLocalVarInit)
  %1 = load i32, ptr %0, align 4
  %2 = load i32, ptr @VarInit, align 4
  %add = add nsw i32 %2, %1
  ret i32 %add
}

; Read-modify-write of a thread-local i32: both the load and the store use the
; displacement-off-r13 form (no separate address materialization).
define void @loadStore1(i32 noundef signext %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadStore1:
; SMALL-LOCAL-EXEC-SMALLCM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    lwz r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    addi r3, r3, 9
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    stw r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT:    blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadStore1:
; SMALL-LOCAL-EXEC-LARGECM64:       # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    lwz r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    addi r3, r3, 9
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    stw r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT:    blr
entry:
  %0 = tail call align 4 ptr @llvm.threadlocal.address.p0(ptr align 4 @IThreadLocalVarInit)
  %1 = load i32, ptr %0, align 4
  %add = add nsw i32 %1, 9
  store i32 %add, ptr %0, align 4
  ret void
}