; NOTE: Assertions have been autogenerated by utils/update_test_checks.py ; RUN: opt < %s -passes=instcombine -mtriple=x86_64-unknown-unknown -S | FileCheck %s declare <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32>, <4 x i32>, <4 x i32>, i32 immarg) declare <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32>, <8 x i32>, <8 x i32>, i32 immarg) declare <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64>, <8 x i64>, <8 x i64>, i32 immarg) declare <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64>, <2 x i64>, <2 x i64>, i32 immarg) declare <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32>, <16 x i32>, <16 x i32>, i32 immarg) declare <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64>, <4 x i64>, <4 x i64>, i32 immarg) define <16 x i32> @vpternlog_d_v512_imm0(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm0( ; CHECK-NEXT: ret <16 x i32> zeroinitializer ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 0) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm1(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm1( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 1) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 1) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm2(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm2( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 2) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 2) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm3(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm3( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 3) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 3) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm4(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm4( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 4) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 4) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm5(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm5( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 5) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 5) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm6(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm6( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 6) ; CHECK-NEXT: ret 
<16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 6) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm7(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm7( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 7) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 7) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm8(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm8( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 8) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 8) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm9(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm9( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 9) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 9) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm10(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm10( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 10) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 10) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm11(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm11( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 11) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 11) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm12(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm12( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 12) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 12) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm13(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm13( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 13) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 13) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm14(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm14( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 14) ; 
CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 14) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm15(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm15( ; CHECK-NEXT: [[R:%.*]] = xor <8 x i64> [[V0:%.*]], ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 15) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm16(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm16( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 16) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 16) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm17(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm17( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 17) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 17) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm18(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm18( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 18) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 18) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm19(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm19( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 19) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 19) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm20(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm20( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 20) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 20) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm21(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm21( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 21) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 21) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm22(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm22( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 22) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> 
@llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 22) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm23(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm23( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 23) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 23) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm24(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm24( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 24) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 24) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm25(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm25( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 25) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 25) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm26(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm26( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 26) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 26) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm27(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm27( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 27) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 27) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm28(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm28( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 28) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 28) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm29(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm29( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 29) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 29) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm30(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm30( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 30) ; CHECK-NEXT: ret <16 x i32> 
[[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 30) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm31(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm31( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 31) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 31) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm32(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm32( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 32) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 32) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm33(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm33( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 33) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 33) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm34(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm34( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 34) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 34) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm35(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm35( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 35) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 35) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm36(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm36( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 36) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 36) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm37(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm37( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 37) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 37) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm38(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm38( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 38) ; 
CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 38) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm39(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm39( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 39) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 39) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm40(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm40( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 40) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 40) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm41(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm41( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 41) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 41) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm42(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm42( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 42) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 42) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm43(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm43( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 43) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 43) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm44(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm44( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 44) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 44) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm45(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm45( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 45) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 45) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm46(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm46( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> 
[[V2:%.*]], i32 46) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 46) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm47(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm47( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 47) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 47) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm48(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm48( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 48) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 48) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm49(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm49( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 49) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 49) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm50(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm50( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 50) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 50) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm51(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm51( ; CHECK-NEXT: [[R:%.*]] = xor <8 x i64> [[V1:%.*]], ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 51) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm52(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm52( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 52) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 52) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm53(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm53( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 53) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 53) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm54(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm54( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 54) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call 
<16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 54) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm55(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm55( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 55) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 55) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm56(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm56( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 56) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 56) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm57(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm57( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 57) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 57) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm58(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm58( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 58) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 58) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm59(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm59( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 59) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 59) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm60(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm60( ; CHECK-NEXT: [[R:%.*]] = xor <16 x i32> [[V0:%.*]], [[V1:%.*]] ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 60) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm61(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm61( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 61) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 61) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm62(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm62( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 62) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> 
%v1, <8 x i32> %v2, i32 62) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm63(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm63( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 63) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 63) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm64(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm64( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 64) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 64) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm65(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm65( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 65) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 65) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm66(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm66( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 66) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 66) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm67(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm67( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 67) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 67) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm68(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm68( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 68) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 68) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm69(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm69( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 69) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 69) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm70(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm70( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 70) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x 
i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 70) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm71(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm71( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 71) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 71) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm72(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm72( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 72) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 72) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm73(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm73( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 73) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 73) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm74(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm74( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 74) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 74) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm75(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm75( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 75) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 75) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm76(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm76( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 76) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 76) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm77(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm77( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 77) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 77) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm78(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm78( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 78) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> 
@llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 78) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm79(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm79( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 79) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 79) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm80(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm80( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 80) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 80) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm81(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm81( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 81) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 81) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm82(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm82( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 82) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 82) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm83(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm83( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 83) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 83) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm84(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm84( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 84) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 84) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm85(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm85( ; CHECK-NEXT: [[R:%.*]] = xor <2 x i64> [[V2:%.*]], ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 85) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm86(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm86( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 86) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, 
i32 86) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm87(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm87( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 87) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 87) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm88(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm88( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 88) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 88) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm89(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm89( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 89) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 89) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm90(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm90( ; CHECK-NEXT: [[R:%.*]] = xor <16 x i32> [[V0:%.*]], [[V2:%.*]] ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 90) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm91(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm91( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 91) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 91) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm92(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm92( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 92) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 92) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm93(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm93( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 93) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 93) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm94(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm94( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 94) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 94) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm95(<4 x 
i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm95( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 95) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 95) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm96(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm96( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 96) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 96) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm97(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm97( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 97) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 97) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm98(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm98( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 98) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 98) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm99(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm99( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 99) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 99) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm100(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm100( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 100) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 100) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm101(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm101( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 101) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 101) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm102(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm102( ; CHECK-NEXT: [[R:%.*]] = xor <16 x i32> [[V1:%.*]], [[V2:%.*]] ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 102) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm103(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; 
CHECK-LABEL: @vpternlog_q_v128_imm103( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 103) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 103) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm104(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm104( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 104) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 104) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm105(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm105( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 105) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 105) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm106(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm106( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 106) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 106) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm107(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm107( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 107) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 107) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm108(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm108( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 108) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 108) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm109(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm109( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 109) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 109) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm110(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm110( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 110) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 110) ret <8 x i32> %r } define <8 x i64> 
@vpternlog_q_v512_imm111(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm111( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 111) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 111) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm112(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm112( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 112) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 112) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm113(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm113( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 113) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 113) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm114(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm114( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 114) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 114) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm115(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm115( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 115) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 115) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm116(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm116( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 116) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 116) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm117(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm117( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 117) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 117) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm118(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm118( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 118) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x 
i32> %v2, i32 118) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm119(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm119( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 119) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 119) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm120(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm120( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 120) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 120) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm121(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm121( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 121) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 121) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm122(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm122( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 122) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 122) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm123(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm123( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 123) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 123) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm124(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm124( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 124) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 124) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm125(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm125( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 125) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 125) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm126(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm126( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 126) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> 
@llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 126) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm127(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm127( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 127) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 127) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm128(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm128( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 128) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 128) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm129(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm129( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 129) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 129) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm130(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm130( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 130) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 130) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm131(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm131( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 131) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 131) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm132(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm132( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 132) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 132) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm133(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm133( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 133) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 133) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm134(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm134( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 134) ; 
CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 134) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm135(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm135( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 135) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 135) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm136(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm136( ; CHECK-NEXT: [[R:%.*]] = and <4 x i32> [[V1:%.*]], [[V2:%.*]] ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 136) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm137(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm137( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 137) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 137) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm138(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm138( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 138) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 138) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm139(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm139( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 139) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 139) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm140(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm140( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 140) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 140) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm141(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm141( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 141) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 141) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm142(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm142( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 142) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = 
tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 142) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm143(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm143( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 143) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 143) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm144(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm144( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 144) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 144) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm145(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm145( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 145) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 145) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm146(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm146( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 146) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 146) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm147(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm147( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 147) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 147) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm148(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm148( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 148) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 148) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm149(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm149( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 149) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 149) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm150(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm150( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> 
[[V2:%.*]], i32 150) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 150) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm151(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm151( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 151) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 151) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm152(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm152( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 152) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 152) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm153(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm153( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 153) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 153) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm154(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm154( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 154) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 154) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm155(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm155( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 155) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 155) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm156(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm156( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 156) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 156) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm157(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm157( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 157) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 157) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm158(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm158( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> 
@llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 158) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 158) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm159(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm159( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 159) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 159) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm160(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm160( ; CHECK-NEXT: [[R:%.*]] = and <4 x i32> [[V0:%.*]], [[V2:%.*]] ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 160) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm161(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm161( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 161) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 161) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm162(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm162( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 162) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 162) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm163(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm163( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 163) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 163) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm164(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm164( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 164) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 164) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm165(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm165( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 165) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 165) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm166(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm166( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x 
i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 166) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 166) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm167(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm167( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 167) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 167) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm168(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm168( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 168) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 168) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm169(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm169( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 169) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 169) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm170(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm170( ; CHECK-NEXT: ret <8 x i32> [[V2:%.*]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 170) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm171(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm171( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 171) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 171) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm172(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm172( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 172) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 172) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm173(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm173( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 173) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 173) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm174(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm174( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 174) ; CHECK-NEXT: ret 
<16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 174) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm175(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm175( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 175) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 175) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm176(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm176( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 176) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 176) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm177(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm177( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 177) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 177) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm178(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm178( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 178) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 178) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm179(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm179( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 179) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 179) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm180(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm180( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 180) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 180) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm181(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm181( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 181) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 181) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm182(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm182( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> 
[[V1:%.*]], <8 x i32> [[V2:%.*]], i32 182) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 182) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm183(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm183( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 183) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 183) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm184(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm184( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 184) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 184) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm185(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm185( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 185) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 185) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm186(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm186( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 186) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 186) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm187(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm187( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 187) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 187) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm188(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm188( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 188) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 188) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm189(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm189( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 189) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 189) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm190(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm190( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> 
@llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 190) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 190) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm191(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm191( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 191) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 191) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm192(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm192( ; CHECK-NEXT: [[R:%.*]] = and <16 x i32> [[V0:%.*]], [[V1:%.*]] ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 192) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm193(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm193( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 193) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 193) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm194(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm194( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 194) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 194) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm195(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm195( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 195) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 195) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm196(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm196( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 196) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 196) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm197(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm197( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 197) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 197) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm198(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm198( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 
x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 198)
; CHECK-NEXT: ret <16 x i32> [[R]]
;
  %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 198)
  ret <16 x i32> %r
}

define <2 x i64> @vpternlog_q_v128_imm199(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) {
; CHECK-LABEL: @vpternlog_q_v128_imm199(
; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 199)
; CHECK-NEXT: ret <2 x i64> [[R]]
;
  %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 199)
  ret <2 x i64> %r
}

define <8 x i32> @vpternlog_d_v256_imm200(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) {
; CHECK-LABEL: @vpternlog_d_v256_imm200(
; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 200)
; CHECK-NEXT: ret <8 x i32> [[R]]
;
  %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 200)
  ret <8 x i32> %r
}

define <8 x i64> @vpternlog_q_v512_imm201(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) {
; CHECK-LABEL: @vpternlog_q_v512_imm201(
; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 201)
; CHECK-NEXT: ret <8 x i64> [[R]]
;
  %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 201)
  ret <8 x i64> %r
}

define <4 x i32> @vpternlog_d_v128_imm202(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) {
; CHECK-LABEL: @vpternlog_d_v128_imm202(
; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 202)
; CHECK-NEXT: ret <4 x i32> [[R]]
;
  %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 202)
  ret <4 x i32> %r
}

define <4 x i64> @vpternlog_q_v256_imm203(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) {
; CHECK-LABEL: @vpternlog_q_v256_imm203(
; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 203)
; CHECK-NEXT: ret <4 x i64> [[R]]
;
  %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 203)
  ret <4 x i64> %r
}

; Immediate 204 (0xCC) is the truth table of the second source operand alone, so the
; call folds to a plain copy of %v1.
define <16 x i32> @vpternlog_d_v512_imm204(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) {
; CHECK-LABEL: @vpternlog_d_v512_imm204(
; CHECK-NEXT: ret <16 x i32> [[V1:%.*]]
;
  %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 204)
  ret <16 x i32> %r
}

define <2 x i64> @vpternlog_q_v128_imm205(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) {
; CHECK-LABEL: @vpternlog_q_v128_imm205(
; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 205)
; CHECK-NEXT: ret <2 x i64> [[R]]
;
  %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 205)
  ret <2 x i64> %r
}

define <8 x i32> @vpternlog_d_v256_imm206(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) {
; CHECK-LABEL: @vpternlog_d_v256_imm206(
; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 206)
; CHECK-NEXT: ret <8
x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 206) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm207(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm207( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 207) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 207) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm208(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm208( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 208) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 208) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm209(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm209( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 209) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 209) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm210(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm210( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 210) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 210) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm211(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm211( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 211) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 211) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm212(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm212( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 212) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 212) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm213(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm213( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 213) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 213) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm214(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm214( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], 
<4 x i32> [[V2:%.*]], i32 214) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 214) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm215(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm215( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 215) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 215) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm216(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm216( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 216) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 216) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm217(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm217( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 217) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 217) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm218(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm218( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 218) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 218) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm219(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm219( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 219) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 219) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm220(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm220( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 220) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 220) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm221(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm221( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 221) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 221) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm222(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm222( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> 
@llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 222) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 222) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm223(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm223( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 223) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 223) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm224(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm224( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 224) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 224) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm225(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm225( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 225) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 225) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm226(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm226( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 226) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 226) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm227(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm227( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 227) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 227) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm228(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm228( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 228) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 228) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm229(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm229( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 229) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 229) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm230(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: 
@vpternlog_d_v256_imm230( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 230) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 230) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm231(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm231( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 231) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 231) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm232(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm232( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 232) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 232) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm233(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm233( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 233) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 233) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm234(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm234( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 234) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 234) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm235(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm235( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 235) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 235) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm236(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm236( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 236) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 236) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm237(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm237( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 237) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 237) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm238(<4 x i32> 
%v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm238( ; CHECK-NEXT: [[R:%.*]] = or <4 x i32> [[V1:%.*]], [[V2:%.*]] ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 238) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm239(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm239( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 239) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 239) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm240(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm240( ; CHECK-NEXT: ret <16 x i32> [[V0:%.*]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 240) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm241(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm241( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 241) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 241) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm242(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm242( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 242) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 242) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm243(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm243( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 243) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 243) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm244(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm244( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> [[V0:%.*]], <4 x i32> [[V1:%.*]], <4 x i32> [[V2:%.*]], i32 244) ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 244) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm245(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm245( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 245) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 245) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm246(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm246( ; CHECK-NEXT: [[R:%.*]] = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> [[V0:%.*]], <16 x 
i32> [[V1:%.*]], <16 x i32> [[V2:%.*]], i32 246) ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 246) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm247(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm247( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 247) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 247) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm248(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm248( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 248) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 248) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_v512_imm249(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v512_imm249( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> [[V0:%.*]], <8 x i64> [[V1:%.*]], <8 x i64> [[V2:%.*]], i32 249) ; CHECK-NEXT: ret <8 x i64> [[R]] ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 249) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_v128_imm250(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v128_imm250( ; CHECK-NEXT: [[R:%.*]] = or <4 x i32> [[V0:%.*]], [[V2:%.*]] ; CHECK-NEXT: ret <4 x i32> [[R]] ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> %v0, <4 x i32> %v1, <4 x i32> %v2, i32 250) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_v256_imm251(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v256_imm251( ; CHECK-NEXT: [[R:%.*]] = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> [[V0:%.*]], <4 x i64> [[V1:%.*]], <4 x i64> [[V2:%.*]], i32 251) ; CHECK-NEXT: ret <4 x i64> [[R]] ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> %v0, <4 x i64> %v1, <4 x i64> %v2, i32 251) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_v512_imm252(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v512_imm252( ; CHECK-NEXT: [[R:%.*]] = or <16 x i32> [[V0:%.*]], [[V1:%.*]] ; CHECK-NEXT: ret <16 x i32> [[R]] ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> %v0, <16 x i32> %v1, <16 x i32> %v2, i32 252) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_v128_imm253(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2) { ; CHECK-LABEL: @vpternlog_q_v128_imm253( ; CHECK-NEXT: [[R:%.*]] = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> [[V0:%.*]], <2 x i64> [[V1:%.*]], <2 x i64> [[V2:%.*]], i32 253) ; CHECK-NEXT: ret <2 x i64> [[R]] ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> %v0, <2 x i64> %v1, <2 x i64> %v2, i32 253) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_v256_imm254(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2) { ; CHECK-LABEL: @vpternlog_d_v256_imm254( ; CHECK-NEXT: [[R:%.*]] = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> [[V0:%.*]], <8 x i32> [[V1:%.*]], <8 x i32> [[V2:%.*]], i32 254) ; CHECK-NEXT: ret <8 x i32> [[R]] ; %r = tail call <8 x i32> 
@llvm.x86.avx512.pternlog.d.256(<8 x i32> %v0, <8 x i32> %v1, <8 x i32> %v2, i32 254)
  ret <8 x i32> %r
}

; Immediate 255 sets every bit of the truth table, so the result is all-ones regardless
; of the operands.
define <8 x i64> @vpternlog_q_v512_imm255(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2) {
; CHECK-LABEL: @vpternlog_q_v512_imm255(
; CHECK-NEXT: ret <8 x i64> <i64 -1, i64 -1, i64 -1, i64 -1, i64 -1, i64 -1, i64 -1, i64 -1>
;
  %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> %v0, <8 x i64> %v1, <8 x i64> %v2, i32 255)
  ret <8 x i64> %r
}

; The constv* tests below call the intrinsics with constant operands (the functions take
; no arguments), so each call should constant fold away.
define <16 x i32> @vpternlog_d_constv512_imm0() {
; CHECK-LABEL: @vpternlog_d_constv512_imm0(
; CHECK-NEXT: ret <16 x i32> zeroinitializer
;
  %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 0)
  ret <16 x i32> %r
}

define <2 x i64> @vpternlog_q_constv128_imm1() {
; CHECK-LABEL: @vpternlog_q_constv128_imm1(
; CHECK-NEXT: ret <2 x i64>
;
  %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 1)
  ret <2 x i64> %r
}

define <8 x i32> @vpternlog_d_constv256_imm2() {
; CHECK-LABEL: @vpternlog_d_constv256_imm2(
; CHECK-NEXT: ret <8 x i32> zeroinitializer
;
  %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 2)
  ret <8 x i32> %r
}

define <8 x i64> @vpternlog_q_constv512_imm3() {
; CHECK-LABEL: @vpternlog_q_constv512_imm3(
; CHECK-NEXT: ret <8 x i64>
;
  %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 3)
  ret <8 x i64> %r
}

define <4 x i32> @vpternlog_d_constv128_imm4() {
; CHECK-LABEL: @vpternlog_d_constv128_imm4(
; CHECK-NEXT: ret <4 x i32>
;
  %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 4)
  ret <4 x i32> %r
}

define <4 x i64> @vpternlog_q_constv256_imm5() {
; CHECK-LABEL: @vpternlog_q_constv256_imm5(
; CHECK-NEXT: ret <4 x i64>
;
  %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 5)
  ret <4 x i64> %r
}

define <16 x i32> @vpternlog_d_constv512_imm6() {
; CHECK-LABEL: @vpternlog_d_constv512_imm6(
; CHECK-NEXT: ret <16 x i32>
;
  %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 6)
  ret <16 x i32> %r
}

define <2 x i64> @vpternlog_q_constv128_imm7() {
; CHECK-LABEL: @vpternlog_q_constv128_imm7(
; CHECK-NEXT: ret <2 x i64>
;
  %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 7)
  ret <2 x i64> %r
}

define <8 x i32> @vpternlog_d_constv256_imm8() {
; CHECK-LABEL: @vpternlog_d_constv256_imm8(
; CHECK-NEXT: ret <8 x i32>
;
  %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 8)
  ret <8 x i32> %r
}

define <8 x i64> @vpternlog_q_constv512_imm9() {
; CHECK-LABEL: @vpternlog_q_constv512_imm9(
; CHECK-NEXT: ret <8 x i64>
;
  %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 9)
  ret <8 x i64> %r
}

define <4 x i32> @vpternlog_d_constv128_imm10() {
; CHECK-LABEL: @vpternlog_d_constv128_imm10(
; CHECK-NEXT: ret <4 x i32>
;
  %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 10)
  ret <4 x i32> %r
}

define <4 x i64> @vpternlog_q_constv256_imm11() {
; CHECK-LABEL: @vpternlog_q_constv256_imm11(
; CHECK-NEXT: ret <4 x i64>
;
  %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 11)
  ret <4 x i64> %r
}

define <16 x i32> @vpternlog_d_constv512_imm12() {
; CHECK-LABEL: @vpternlog_d_constv512_imm12(
; CHECK-NEXT: ret <16 x i32>
;
  %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x
i32> , <16 x i32> , i32 12) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm13() { ; CHECK-LABEL: @vpternlog_q_constv128_imm13( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 13) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm14() { ; CHECK-LABEL: @vpternlog_d_constv256_imm14( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 14) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm15() { ; CHECK-LABEL: @vpternlog_q_constv512_imm15( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 15) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm16() { ; CHECK-LABEL: @vpternlog_d_constv128_imm16( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 16) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm17() { ; CHECK-LABEL: @vpternlog_q_constv256_imm17( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 17) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm18() { ; CHECK-LABEL: @vpternlog_d_constv512_imm18( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 18) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm19() { ; CHECK-LABEL: @vpternlog_q_constv128_imm19( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 19) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm20() { ; CHECK-LABEL: @vpternlog_d_constv256_imm20( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 20) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm21() { ; CHECK-LABEL: @vpternlog_q_constv512_imm21( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 21) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm22() { ; CHECK-LABEL: @vpternlog_d_constv128_imm22( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 22) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm23() { ; CHECK-LABEL: @vpternlog_q_constv256_imm23( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 23) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm24() { ; CHECK-LABEL: @vpternlog_d_constv512_imm24( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 24) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm25() { ; CHECK-LABEL: @vpternlog_q_constv128_imm25( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 25) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm26() { ; CHECK-LABEL: @vpternlog_d_constv256_imm26( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 26) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm27() { ; 
CHECK-LABEL: @vpternlog_q_constv512_imm27( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 27) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm28() { ; CHECK-LABEL: @vpternlog_d_constv128_imm28( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 28) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm29() { ; CHECK-LABEL: @vpternlog_q_constv256_imm29( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 29) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm30() { ; CHECK-LABEL: @vpternlog_d_constv512_imm30( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 30) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm31() { ; CHECK-LABEL: @vpternlog_q_constv128_imm31( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 31) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm32() { ; CHECK-LABEL: @vpternlog_d_constv256_imm32( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 32) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm33() { ; CHECK-LABEL: @vpternlog_q_constv512_imm33( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 33) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm34() { ; CHECK-LABEL: @vpternlog_d_constv128_imm34( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 34) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm35() { ; CHECK-LABEL: @vpternlog_q_constv256_imm35( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 35) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm36() { ; CHECK-LABEL: @vpternlog_d_constv512_imm36( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 36) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm37() { ; CHECK-LABEL: @vpternlog_q_constv128_imm37( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 37) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm38() { ; CHECK-LABEL: @vpternlog_d_constv256_imm38( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 38) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm39() { ; CHECK-LABEL: @vpternlog_q_constv512_imm39( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 39) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm40() { ; CHECK-LABEL: @vpternlog_d_constv128_imm40( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 40) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm41() { ; CHECK-LABEL: @vpternlog_q_constv256_imm41( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> 
@llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 41) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm42() { ; CHECK-LABEL: @vpternlog_d_constv512_imm42( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 42) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm43() { ; CHECK-LABEL: @vpternlog_q_constv128_imm43( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 43) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm44() { ; CHECK-LABEL: @vpternlog_d_constv256_imm44( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 44) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm45() { ; CHECK-LABEL: @vpternlog_q_constv512_imm45( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 45) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm46() { ; CHECK-LABEL: @vpternlog_d_constv128_imm46( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 46) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm47() { ; CHECK-LABEL: @vpternlog_q_constv256_imm47( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 47) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm48() { ; CHECK-LABEL: @vpternlog_d_constv512_imm48( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 48) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm49() { ; CHECK-LABEL: @vpternlog_q_constv128_imm49( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 49) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm50() { ; CHECK-LABEL: @vpternlog_d_constv256_imm50( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 50) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm51() { ; CHECK-LABEL: @vpternlog_q_constv512_imm51( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 51) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm52() { ; CHECK-LABEL: @vpternlog_d_constv128_imm52( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 52) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm53() { ; CHECK-LABEL: @vpternlog_q_constv256_imm53( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 53) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm54() { ; CHECK-LABEL: @vpternlog_d_constv512_imm54( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 54) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm55() { ; CHECK-LABEL: @vpternlog_q_constv128_imm55( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 55) ret <2 x i64> %r } define 
<8 x i32> @vpternlog_d_constv256_imm56() { ; CHECK-LABEL: @vpternlog_d_constv256_imm56( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 56) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm57() { ; CHECK-LABEL: @vpternlog_q_constv512_imm57( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 57) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm58() { ; CHECK-LABEL: @vpternlog_d_constv128_imm58( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 58) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm59() { ; CHECK-LABEL: @vpternlog_q_constv256_imm59( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 59) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm60() { ; CHECK-LABEL: @vpternlog_d_constv512_imm60( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 60) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm61() { ; CHECK-LABEL: @vpternlog_q_constv128_imm61( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 61) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm62() { ; CHECK-LABEL: @vpternlog_d_constv256_imm62( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 62) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm63() { ; CHECK-LABEL: @vpternlog_q_constv512_imm63( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 63) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm64() { ; CHECK-LABEL: @vpternlog_d_constv128_imm64( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 64) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm65() { ; CHECK-LABEL: @vpternlog_q_constv256_imm65( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 65) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm66() { ; CHECK-LABEL: @vpternlog_d_constv512_imm66( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 66) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm67() { ; CHECK-LABEL: @vpternlog_q_constv128_imm67( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 67) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm68() { ; CHECK-LABEL: @vpternlog_d_constv256_imm68( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 68) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm69() { ; CHECK-LABEL: @vpternlog_q_constv512_imm69( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 69) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm70() { ; CHECK-LABEL: @vpternlog_d_constv128_imm70( ; CHECK-NEXT: ret <4 x 
i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 70) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm71() { ; CHECK-LABEL: @vpternlog_q_constv256_imm71( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 71) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm72() { ; CHECK-LABEL: @vpternlog_d_constv512_imm72( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 72) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm73() { ; CHECK-LABEL: @vpternlog_q_constv128_imm73( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 73) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm74() { ; CHECK-LABEL: @vpternlog_d_constv256_imm74( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 74) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm75() { ; CHECK-LABEL: @vpternlog_q_constv512_imm75( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 75) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm76() { ; CHECK-LABEL: @vpternlog_d_constv128_imm76( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 76) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm77() { ; CHECK-LABEL: @vpternlog_q_constv256_imm77( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 77) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm78() { ; CHECK-LABEL: @vpternlog_d_constv512_imm78( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 78) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm79() { ; CHECK-LABEL: @vpternlog_q_constv128_imm79( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 79) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm80() { ; CHECK-LABEL: @vpternlog_d_constv256_imm80( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 80) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm81() { ; CHECK-LABEL: @vpternlog_q_constv512_imm81( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 81) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm82() { ; CHECK-LABEL: @vpternlog_d_constv128_imm82( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 82) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm83() { ; CHECK-LABEL: @vpternlog_q_constv256_imm83( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 83) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm84() { ; CHECK-LABEL: @vpternlog_d_constv512_imm84( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 
84) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm85() { ; CHECK-LABEL: @vpternlog_q_constv128_imm85( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 85) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm86() { ; CHECK-LABEL: @vpternlog_d_constv256_imm86( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 86) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm87() { ; CHECK-LABEL: @vpternlog_q_constv512_imm87( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 87) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm88() { ; CHECK-LABEL: @vpternlog_d_constv128_imm88( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 88) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm89() { ; CHECK-LABEL: @vpternlog_q_constv256_imm89( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 89) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm90() { ; CHECK-LABEL: @vpternlog_d_constv512_imm90( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 90) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm91() { ; CHECK-LABEL: @vpternlog_q_constv128_imm91( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 91) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm92() { ; CHECK-LABEL: @vpternlog_d_constv256_imm92( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 92) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm93() { ; CHECK-LABEL: @vpternlog_q_constv512_imm93( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 93) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm94() { ; CHECK-LABEL: @vpternlog_d_constv128_imm94( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 94) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm95() { ; CHECK-LABEL: @vpternlog_q_constv256_imm95( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 95) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm96() { ; CHECK-LABEL: @vpternlog_d_constv512_imm96( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 96) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm97() { ; CHECK-LABEL: @vpternlog_q_constv128_imm97( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 97) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm98() { ; CHECK-LABEL: @vpternlog_d_constv256_imm98( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 98) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm99() { ; CHECK-LABEL: 
@vpternlog_q_constv512_imm99( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 99) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm100() { ; CHECK-LABEL: @vpternlog_d_constv128_imm100( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 100) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm101() { ; CHECK-LABEL: @vpternlog_q_constv256_imm101( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 101) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm102() { ; CHECK-LABEL: @vpternlog_d_constv512_imm102( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 102) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm103() { ; CHECK-LABEL: @vpternlog_q_constv128_imm103( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 103) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm104() { ; CHECK-LABEL: @vpternlog_d_constv256_imm104( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 104) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm105() { ; CHECK-LABEL: @vpternlog_q_constv512_imm105( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 105) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm106() { ; CHECK-LABEL: @vpternlog_d_constv128_imm106( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 106) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm107() { ; CHECK-LABEL: @vpternlog_q_constv256_imm107( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 107) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm108() { ; CHECK-LABEL: @vpternlog_d_constv512_imm108( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 108) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm109() { ; CHECK-LABEL: @vpternlog_q_constv128_imm109( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 109) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm110() { ; CHECK-LABEL: @vpternlog_d_constv256_imm110( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 110) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm111() { ; CHECK-LABEL: @vpternlog_q_constv512_imm111( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 111) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm112() { ; CHECK-LABEL: @vpternlog_d_constv128_imm112( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 112) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm113() { ; CHECK-LABEL: @vpternlog_q_constv256_imm113( ; CHECK-NEXT: ret <4 x i64> ; %r = tail 
call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 113) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm114() { ; CHECK-LABEL: @vpternlog_d_constv512_imm114( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 114) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm115() { ; CHECK-LABEL: @vpternlog_q_constv128_imm115( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 115) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm116() { ; CHECK-LABEL: @vpternlog_d_constv256_imm116( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 116) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm117() { ; CHECK-LABEL: @vpternlog_q_constv512_imm117( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 117) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm118() { ; CHECK-LABEL: @vpternlog_d_constv128_imm118( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 118) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm119() { ; CHECK-LABEL: @vpternlog_q_constv256_imm119( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 119) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm120() { ; CHECK-LABEL: @vpternlog_d_constv512_imm120( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 120) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm121() { ; CHECK-LABEL: @vpternlog_q_constv128_imm121( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 121) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm122() { ; CHECK-LABEL: @vpternlog_d_constv256_imm122( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 122) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm123() { ; CHECK-LABEL: @vpternlog_q_constv512_imm123( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 123) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm124() { ; CHECK-LABEL: @vpternlog_d_constv128_imm124( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 124) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm125() { ; CHECK-LABEL: @vpternlog_q_constv256_imm125( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 125) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm126() { ; CHECK-LABEL: @vpternlog_d_constv512_imm126( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 126) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm127() { ; CHECK-LABEL: @vpternlog_q_constv128_imm127( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 
x i64> , <2 x i64> , i32 127) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm128() { ; CHECK-LABEL: @vpternlog_d_constv256_imm128( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 128) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm129() { ; CHECK-LABEL: @vpternlog_q_constv512_imm129( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 129) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm130() { ; CHECK-LABEL: @vpternlog_d_constv128_imm130( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 130) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm131() { ; CHECK-LABEL: @vpternlog_q_constv256_imm131( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 131) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm132() { ; CHECK-LABEL: @vpternlog_d_constv512_imm132( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 132) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm133() { ; CHECK-LABEL: @vpternlog_q_constv128_imm133( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 133) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm134() { ; CHECK-LABEL: @vpternlog_d_constv256_imm134( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 134) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm135() { ; CHECK-LABEL: @vpternlog_q_constv512_imm135( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 135) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm136() { ; CHECK-LABEL: @vpternlog_d_constv128_imm136( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 136) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm137() { ; CHECK-LABEL: @vpternlog_q_constv256_imm137( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 137) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm138() { ; CHECK-LABEL: @vpternlog_d_constv512_imm138( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 138) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm139() { ; CHECK-LABEL: @vpternlog_q_constv128_imm139( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 139) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm140() { ; CHECK-LABEL: @vpternlog_d_constv256_imm140( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 140) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm141() { ; CHECK-LABEL: @vpternlog_q_constv512_imm141( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 141) ret <8 x i64> %r } define <4 x i32> 
@vpternlog_d_constv128_imm142() { ; CHECK-LABEL: @vpternlog_d_constv128_imm142( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 142) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm143() { ; CHECK-LABEL: @vpternlog_q_constv256_imm143( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 143) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm144() { ; CHECK-LABEL: @vpternlog_d_constv512_imm144( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 144) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm145() { ; CHECK-LABEL: @vpternlog_q_constv128_imm145( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 145) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm146() { ; CHECK-LABEL: @vpternlog_d_constv256_imm146( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 146) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm147() { ; CHECK-LABEL: @vpternlog_q_constv512_imm147( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 147) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm148() { ; CHECK-LABEL: @vpternlog_d_constv128_imm148( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 148) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm149() { ; CHECK-LABEL: @vpternlog_q_constv256_imm149( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 149) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm150() { ; CHECK-LABEL: @vpternlog_d_constv512_imm150( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 150) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm151() { ; CHECK-LABEL: @vpternlog_q_constv128_imm151( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 151) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm152() { ; CHECK-LABEL: @vpternlog_d_constv256_imm152( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 152) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm153() { ; CHECK-LABEL: @vpternlog_q_constv512_imm153( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 153) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm154() { ; CHECK-LABEL: @vpternlog_d_constv128_imm154( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 154) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm155() { ; CHECK-LABEL: @vpternlog_q_constv256_imm155( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 155) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm156() { ; CHECK-LABEL: 
@vpternlog_d_constv512_imm156( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 156) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm157() { ; CHECK-LABEL: @vpternlog_q_constv128_imm157( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 157) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm158() { ; CHECK-LABEL: @vpternlog_d_constv256_imm158( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 158) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm159() { ; CHECK-LABEL: @vpternlog_q_constv512_imm159( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 159) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm160() { ; CHECK-LABEL: @vpternlog_d_constv128_imm160( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 160) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm161() { ; CHECK-LABEL: @vpternlog_q_constv256_imm161( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 161) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm162() { ; CHECK-LABEL: @vpternlog_d_constv512_imm162( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 162) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm163() { ; CHECK-LABEL: @vpternlog_q_constv128_imm163( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 163) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm164() { ; CHECK-LABEL: @vpternlog_d_constv256_imm164( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 164) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm165() { ; CHECK-LABEL: @vpternlog_q_constv512_imm165( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 165) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm166() { ; CHECK-LABEL: @vpternlog_d_constv128_imm166( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 166) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm167() { ; CHECK-LABEL: @vpternlog_q_constv256_imm167( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 167) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm168() { ; CHECK-LABEL: @vpternlog_d_constv512_imm168( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 168) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm169() { ; CHECK-LABEL: @vpternlog_q_constv128_imm169( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 169) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm170() { ; CHECK-LABEL: @vpternlog_d_constv256_imm170( ; CHECK-NEXT: ret <8 x i32> ; 
%r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 170) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm171() { ; CHECK-LABEL: @vpternlog_q_constv512_imm171( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 171) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm172() { ; CHECK-LABEL: @vpternlog_d_constv128_imm172( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 172) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm173() { ; CHECK-LABEL: @vpternlog_q_constv256_imm173( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 173) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm174() { ; CHECK-LABEL: @vpternlog_d_constv512_imm174( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 174) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm175() { ; CHECK-LABEL: @vpternlog_q_constv128_imm175( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 175) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm176() { ; CHECK-LABEL: @vpternlog_d_constv256_imm176( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 176) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm177() { ; CHECK-LABEL: @vpternlog_q_constv512_imm177( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 177) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm178() { ; CHECK-LABEL: @vpternlog_d_constv128_imm178( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 178) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm179() { ; CHECK-LABEL: @vpternlog_q_constv256_imm179( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 179) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm180() { ; CHECK-LABEL: @vpternlog_d_constv512_imm180( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 180) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm181() { ; CHECK-LABEL: @vpternlog_q_constv128_imm181( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 181) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm182() { ; CHECK-LABEL: @vpternlog_d_constv256_imm182( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 182) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm183() { ; CHECK-LABEL: @vpternlog_q_constv512_imm183( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 183) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm184() { ; CHECK-LABEL: @vpternlog_d_constv128_imm184( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , 
<4 x i32> , <4 x i32> , i32 184) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm185() { ; CHECK-LABEL: @vpternlog_q_constv256_imm185( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 185) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm186() { ; CHECK-LABEL: @vpternlog_d_constv512_imm186( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 186) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm187() { ; CHECK-LABEL: @vpternlog_q_constv128_imm187( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 187) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm188() { ; CHECK-LABEL: @vpternlog_d_constv256_imm188( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 188) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm189() { ; CHECK-LABEL: @vpternlog_q_constv512_imm189( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 189) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm190() { ; CHECK-LABEL: @vpternlog_d_constv128_imm190( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 190) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm191() { ; CHECK-LABEL: @vpternlog_q_constv256_imm191( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 191) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm192() { ; CHECK-LABEL: @vpternlog_d_constv512_imm192( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 192) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm193() { ; CHECK-LABEL: @vpternlog_q_constv128_imm193( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 193) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm194() { ; CHECK-LABEL: @vpternlog_d_constv256_imm194( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 194) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm195() { ; CHECK-LABEL: @vpternlog_q_constv512_imm195( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 195) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm196() { ; CHECK-LABEL: @vpternlog_d_constv128_imm196( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 196) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm197() { ; CHECK-LABEL: @vpternlog_q_constv256_imm197( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 197) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm198() { ; CHECK-LABEL: @vpternlog_d_constv512_imm198( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 198) ret <16 x i32> %r } define <2 
x i64> @vpternlog_q_constv128_imm199() { ; CHECK-LABEL: @vpternlog_q_constv128_imm199( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 199) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm200() { ; CHECK-LABEL: @vpternlog_d_constv256_imm200( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 200) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm201() { ; CHECK-LABEL: @vpternlog_q_constv512_imm201( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 201) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm202() { ; CHECK-LABEL: @vpternlog_d_constv128_imm202( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 202) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm203() { ; CHECK-LABEL: @vpternlog_q_constv256_imm203( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 203) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm204() { ; CHECK-LABEL: @vpternlog_d_constv512_imm204( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 204) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm205() { ; CHECK-LABEL: @vpternlog_q_constv128_imm205( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 205) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm206() { ; CHECK-LABEL: @vpternlog_d_constv256_imm206( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 206) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm207() { ; CHECK-LABEL: @vpternlog_q_constv512_imm207( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 207) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm208() { ; CHECK-LABEL: @vpternlog_d_constv128_imm208( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 208) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm209() { ; CHECK-LABEL: @vpternlog_q_constv256_imm209( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 209) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm210() { ; CHECK-LABEL: @vpternlog_d_constv512_imm210( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 210) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm211() { ; CHECK-LABEL: @vpternlog_q_constv128_imm211( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 211) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm212() { ; CHECK-LABEL: @vpternlog_d_constv256_imm212( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 212) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm213() { ; CHECK-LABEL: 
@vpternlog_q_constv512_imm213( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 213) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm214() { ; CHECK-LABEL: @vpternlog_d_constv128_imm214( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 214) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm215() { ; CHECK-LABEL: @vpternlog_q_constv256_imm215( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 215) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm216() { ; CHECK-LABEL: @vpternlog_d_constv512_imm216( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 216) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm217() { ; CHECK-LABEL: @vpternlog_q_constv128_imm217( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 217) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm218() { ; CHECK-LABEL: @vpternlog_d_constv256_imm218( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 218) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm219() { ; CHECK-LABEL: @vpternlog_q_constv512_imm219( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 219) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm220() { ; CHECK-LABEL: @vpternlog_d_constv128_imm220( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 220) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm221() { ; CHECK-LABEL: @vpternlog_q_constv256_imm221( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 221) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm222() { ; CHECK-LABEL: @vpternlog_d_constv512_imm222( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 222) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm223() { ; CHECK-LABEL: @vpternlog_q_constv128_imm223( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 223) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm224() { ; CHECK-LABEL: @vpternlog_d_constv256_imm224( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 224) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm225() { ; CHECK-LABEL: @vpternlog_q_constv512_imm225( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 225) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm226() { ; CHECK-LABEL: @vpternlog_d_constv128_imm226( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 226) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm227() { ; CHECK-LABEL: @vpternlog_q_constv256_imm227( ; CHECK-NEXT: ret <4 x i64> ; %r = 
tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 227) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm228() { ; CHECK-LABEL: @vpternlog_d_constv512_imm228( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 228) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm229() { ; CHECK-LABEL: @vpternlog_q_constv128_imm229( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 229) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm230() { ; CHECK-LABEL: @vpternlog_d_constv256_imm230( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 230) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm231() { ; CHECK-LABEL: @vpternlog_q_constv512_imm231( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 231) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm232() { ; CHECK-LABEL: @vpternlog_d_constv128_imm232( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 232) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm233() { ; CHECK-LABEL: @vpternlog_q_constv256_imm233( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 233) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm234() { ; CHECK-LABEL: @vpternlog_d_constv512_imm234( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 234) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm235() { ; CHECK-LABEL: @vpternlog_q_constv128_imm235( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 235) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm236() { ; CHECK-LABEL: @vpternlog_d_constv256_imm236( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 236) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm237() { ; CHECK-LABEL: @vpternlog_q_constv512_imm237( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 237) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm238() { ; CHECK-LABEL: @vpternlog_d_constv128_imm238( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 238) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm239() { ; CHECK-LABEL: @vpternlog_q_constv256_imm239( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 239) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm240() { ; CHECK-LABEL: @vpternlog_d_constv512_imm240( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 240) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm241() { ; CHECK-LABEL: @vpternlog_q_constv128_imm241( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> 
, <2 x i64> , <2 x i64> , i32 241) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm242() { ; CHECK-LABEL: @vpternlog_d_constv256_imm242( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 242) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm243() { ; CHECK-LABEL: @vpternlog_q_constv512_imm243( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 243) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm244() { ; CHECK-LABEL: @vpternlog_d_constv128_imm244( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 244) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm245() { ; CHECK-LABEL: @vpternlog_q_constv256_imm245( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 245) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm246() { ; CHECK-LABEL: @vpternlog_d_constv512_imm246( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 246) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm247() { ; CHECK-LABEL: @vpternlog_q_constv128_imm247( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 247) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm248() { ; CHECK-LABEL: @vpternlog_d_constv256_imm248( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 248) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm249() { ; CHECK-LABEL: @vpternlog_q_constv512_imm249( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 249) ret <8 x i64> %r } define <4 x i32> @vpternlog_d_constv128_imm250() { ; CHECK-LABEL: @vpternlog_d_constv128_imm250( ; CHECK-NEXT: ret <4 x i32> ; %r = tail call <4 x i32> @llvm.x86.avx512.pternlog.d.128(<4 x i32> , <4 x i32> , <4 x i32> , i32 250) ret <4 x i32> %r } define <4 x i64> @vpternlog_q_constv256_imm251() { ; CHECK-LABEL: @vpternlog_q_constv256_imm251( ; CHECK-NEXT: ret <4 x i64> ; %r = tail call <4 x i64> @llvm.x86.avx512.pternlog.q.256(<4 x i64> , <4 x i64> , <4 x i64> , i32 251) ret <4 x i64> %r } define <16 x i32> @vpternlog_d_constv512_imm252() { ; CHECK-LABEL: @vpternlog_d_constv512_imm252( ; CHECK-NEXT: ret <16 x i32> ; %r = tail call <16 x i32> @llvm.x86.avx512.pternlog.d.512(<16 x i32> , <16 x i32> , <16 x i32> , i32 252) ret <16 x i32> %r } define <2 x i64> @vpternlog_q_constv128_imm253() { ; CHECK-LABEL: @vpternlog_q_constv128_imm253( ; CHECK-NEXT: ret <2 x i64> ; %r = tail call <2 x i64> @llvm.x86.avx512.pternlog.q.128(<2 x i64> , <2 x i64> , <2 x i64> , i32 253) ret <2 x i64> %r } define <8 x i32> @vpternlog_d_constv256_imm254() { ; CHECK-LABEL: @vpternlog_d_constv256_imm254( ; CHECK-NEXT: ret <8 x i32> ; %r = tail call <8 x i32> @llvm.x86.avx512.pternlog.d.256(<8 x i32> , <8 x i32> , <8 x i32> , i32 254) ret <8 x i32> %r } define <8 x i64> @vpternlog_q_constv512_imm255() { ; CHECK-LABEL: @vpternlog_q_constv512_imm255( ; CHECK-NEXT: ret <8 x i64> ; %r = tail call <8 x i64> @llvm.x86.avx512.pternlog.q.512(<8 x i64> , <8 x i64> , <8 x i64> , i32 255) ret <8 x i64> %r }
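; Informational note (not autogenerated): VPTERNLOG interprets its 8-bit
; immediate as a truth table indexed by (A<<2)|(B<<1)|C, where A, B and C are
; the corresponding bits of the first, second and third source operands. An
; immediate of 0 therefore folds to zeroinitializer and 255 to all-ones for
; any operands, and for the all-constant-operand tests above the checks expect
; instcombine to fold every call into a constant vector return.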