; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefixes=SSE,SSE2
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse3 | FileCheck %s --check-prefixes=SSE,SSE3
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+ssse3 | FileCheck %s --check-prefixes=SSE,SSSE3
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse4.1 | FileCheck %s --check-prefixes=SSE,SSE41
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX2
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512vpopcntdq | FileCheck %s --check-prefixes=AVX,AVX512VPOPCNTDQ
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512vpopcntdq,+avx512vl | FileCheck %s --check-prefixes=AVX,AVX512VPOPCNTDQVL
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bitalg | FileCheck %s --check-prefix=BITALG_NOVLX
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bitalg,+avx512vl | FileCheck %s --check-prefix=BITALG
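
; The ugt_1 case below does not need a real population count: for any byte x,
; popcount(x) > 1 is equivalent to (x & (x - 1)) != 0, i.e. clearing the
; lowest set bit leaves a nonzero value. The non-BITALG lowerings checked
; below use exactly that sequence: paddb with all-ones (x - 1), pand with x,
; pcmpeqb against zero, and a final inversion.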
define <16 x i8> @ugt_1_v16i8(<16 x i8> %0) {
; SSE-LABEL: ugt_1_v16i8:
; SSE: # %bb.0:
; SSE-NEXT: pcmpeqd %xmm2, %xmm2
; SSE-NEXT: movdqa %xmm0, %xmm1
; SSE-NEXT: paddb %xmm2, %xmm1
; SSE-NEXT: pand %xmm0, %xmm1
; SSE-NEXT: pxor %xmm0, %xmm0
; SSE-NEXT: pcmpeqb %xmm0, %xmm1
; SSE-NEXT: pxor %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, %xmm0
; SSE-NEXT: retq
;
; AVX1-LABEL: ugt_1_v16i8:
; AVX1: # %bb.0:
; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpaddb %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
; AVX1-NEXT: vpcmpeqb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_1_v16i8:
; AVX2: # %bb.0:
; AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpaddb %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm2, %xmm2, %xmm2
; AVX2-NEXT: vpcmpeqb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_1_v16i8:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: vpaddb %xmm1, %xmm0, %xmm1
; AVX512VPOPCNTDQ-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vpternlogq $15, %zmm0, %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_1_v16i8:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
; AVX512VPOPCNTDQVL-NEXT: vpaddb %xmm1, %xmm0, %xmm1
; AVX512VPOPCNTDQVL-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_1_v16i8:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_1_v16i8:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleub {{.*}}(%rip), %xmm0, %k0
; BITALG-NEXT: vpmovm2b %k0, %xmm0
; BITALG-NEXT: retq
  %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
  %3 = icmp ugt <16 x i8> %2, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
  %4 = sext <16 x i1> %3 to <16 x i8>
  ret <16 x i8> %4
}
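
; ult_2 is the complementary test, popcount(x) < 2 <=> (x & (x - 1)) == 0,
; so the same sequence is used without the final inversion.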
define <16 x i8> @ult_2_v16i8(<16 x i8> %0) {
; SSE-LABEL: ult_2_v16i8:
; SSE: # %bb.0:
; SSE-NEXT: pcmpeqd %xmm1, %xmm1
; SSE-NEXT: paddb %xmm0, %xmm1
; SSE-NEXT: pand %xmm1, %xmm0
; SSE-NEXT: pxor %xmm1, %xmm1
; SSE-NEXT: pcmpeqb %xmm1, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: ult_2_v16i8:
; AVX: # %bb.0:
; AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
; AVX-NEXT: vpaddb %xmm1, %xmm0, %xmm1
; AVX-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_2_v16i8:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
; BITALG_NOVLX-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_2_v16i8:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpcmpltub {{.*}}(%rip), %xmm0, %k0
; BITALG-NEXT: vpmovm2b %k0, %xmm0
; BITALG-NEXT: retq
  %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
  %3 = icmp ult <16 x i8> %2, <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>
  %4 = sext <16 x i1> %3 to <16 x i8>
  ret <16 x i8> %4
}
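
; From ugt_2 onward a real per-byte population count is required. The checked
; lowerings compute it with the classic shift/mask/add bit-counting sequence
; on SSE2/SSE3, a 4-bit nibble lookup via pshufb on SSSE3/SSE4.1/AVX,
; vpopcntd on bytes zero-extended to dwords (truncated back with vpmovdb) for
; AVX512VPOPCNTDQ, and a native vpopcntb for BITALG; the threshold compare is
; then done with pcmpgtb, pmaxub/pminub plus pcmpeqb, or the
; vpcmpnleub/vpcmpltub mask compares.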
define <16 x i8> @ugt_2_v16i8(<16 x i8> %0) {
; SSE2-LABEL: ugt_2_v16i8:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pcmpgtb {{.*}}(%rip), %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_2_v16i8:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pcmpgtb {{.*}}(%rip), %xmm1
; SSE3-NEXT: movdqa %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_2_v16i8:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
; SSSE3-NEXT: pmaxub %xmm3, %xmm0
; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_2_v16i8:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
; SSE41-NEXT: pmaxub %xmm3, %xmm0
; SSE41-NEXT: pcmpeqb %xmm3, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_2_v16i8:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpmaxub {{.*}}(%rip), %xmm0, %xmm1
; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_2_v16i8:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpmaxub {{.*}}(%rip), %xmm0, %xmm1
; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_2_v16i8:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_2_v16i8:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_2_v16i8:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_2_v16i8:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleub {{.*}}(%rip), %xmm0, %k0
; BITALG-NEXT: vpmovm2b %k0, %xmm0
; BITALG-NEXT: retq
  %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
  %3 = icmp ugt <16 x i8> %2, <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>
  %4 = sext <16 x i1> %3 to <16 x i8>
  ret <16 x i8> %4
}
define <16 x i8> @ult_3_v16i8(<16 x i8> %0) {
|
|
; SSE2-LABEL: ult_3_v16i8:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
|
|
; SSE2-NEXT: pcmpgtb %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_3_v16i8:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
|
|
; SSE3-NEXT: pcmpgtb %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_3_v16i8:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
|
|
; SSSE3-NEXT: pminub %xmm3, %xmm0
|
|
; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_3_v16i8:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
|
|
; SSE41-NEXT: pminub %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_3_v16i8:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpminub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_3_v16i8:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpminub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_3_v16i8:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_3_v16i8:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_3_v16i8:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_3_v16i8:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltub {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2b %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
|
|
%3 = icmp ult <16 x i8> %2, <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>
|
|
%4 = sext <16 x i1> %3 to <16 x i8>
|
|
ret <16 x i8> %4
|
|
}
define <16 x i8> @ugt_3_v16i8(<16 x i8> %0) {
|
|
; SSE2-LABEL: ugt_3_v16i8:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pcmpgtb {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_3_v16i8:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pcmpgtb {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_3_v16i8:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
|
|
; SSSE3-NEXT: pmaxub %xmm3, %xmm0
|
|
; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_3_v16i8:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
|
|
; SSE41-NEXT: pmaxub %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_3_v16i8:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpmaxub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_3_v16i8:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpmaxub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_3_v16i8:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_3_v16i8:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_3_v16i8:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_3_v16i8:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleub {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2b %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
|
|
%3 = icmp ugt <16 x i8> %2, <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>
|
|
%4 = sext <16 x i1> %3 to <16 x i8>
|
|
ret <16 x i8> %4
|
|
}
define <16 x i8> @ult_4_v16i8(<16 x i8> %0) {
|
|
; SSE2-LABEL: ult_4_v16i8:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
|
|
; SSE2-NEXT: pcmpgtb %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_4_v16i8:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
|
|
; SSE3-NEXT: pcmpgtb %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_4_v16i8:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
|
|
; SSSE3-NEXT: pminub %xmm3, %xmm0
|
|
; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_4_v16i8:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
|
|
; SSE41-NEXT: pminub %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_4_v16i8:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpminub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_4_v16i8:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpminub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_4_v16i8:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_4_v16i8:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_4_v16i8:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_4_v16i8:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltub {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2b %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
|
|
%3 = icmp ult <16 x i8> %2, <i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4>
|
|
%4 = sext <16 x i1> %3 to <16 x i8>
|
|
ret <16 x i8> %4
|
|
}
define <16 x i8> @ugt_4_v16i8(<16 x i8> %0) {
|
|
; SSE2-LABEL: ugt_4_v16i8:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pcmpgtb {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_4_v16i8:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pcmpgtb {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_4_v16i8:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5]
|
|
; SSSE3-NEXT: pmaxub %xmm3, %xmm0
|
|
; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_4_v16i8:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5]
|
|
; SSE41-NEXT: pmaxub %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_4_v16i8:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpmaxub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_4_v16i8:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpmaxub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_4_v16i8:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_4_v16i8:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_4_v16i8:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_4_v16i8:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleub {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2b %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
|
|
%3 = icmp ugt <16 x i8> %2, <i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4>
|
|
%4 = sext <16 x i1> %3 to <16 x i8>
|
|
ret <16 x i8> %4
|
|
}
define <16 x i8> @ult_5_v16i8(<16 x i8> %0) {
|
|
; SSE2-LABEL: ult_5_v16i8:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5]
|
|
; SSE2-NEXT: pcmpgtb %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_5_v16i8:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5]
|
|
; SSE3-NEXT: pcmpgtb %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_5_v16i8:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
|
|
; SSSE3-NEXT: pminub %xmm3, %xmm0
|
|
; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_5_v16i8:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
|
|
; SSE41-NEXT: pminub %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_5_v16i8:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpminub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_5_v16i8:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpminub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_5_v16i8:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_5_v16i8:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_5_v16i8:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_5_v16i8:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltub {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2b %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
|
|
%3 = icmp ult <16 x i8> %2, <i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5>
|
|
%4 = sext <16 x i1> %3 to <16 x i8>
|
|
ret <16 x i8> %4
|
|
}
define <16 x i8> @ugt_5_v16i8(<16 x i8> %0) {
|
|
; SSE2-LABEL: ugt_5_v16i8:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pcmpgtb {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_5_v16i8:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pcmpgtb {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_5_v16i8:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6]
|
|
; SSSE3-NEXT: pmaxub %xmm3, %xmm0
|
|
; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_5_v16i8:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6]
|
|
; SSE41-NEXT: pmaxub %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_5_v16i8:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpmaxub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_5_v16i8:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpmaxub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_5_v16i8:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_5_v16i8:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_5_v16i8:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_5_v16i8:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleub {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2b %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
|
|
%3 = icmp ugt <16 x i8> %2, <i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5>
|
|
%4 = sext <16 x i1> %3 to <16 x i8>
|
|
ret <16 x i8> %4
|
|
}
define <16 x i8> @ult_6_v16i8(<16 x i8> %0) {
|
|
; SSE2-LABEL: ult_6_v16i8:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6]
|
|
; SSE2-NEXT: pcmpgtb %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_6_v16i8:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6]
|
|
; SSE3-NEXT: pcmpgtb %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_6_v16i8:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5]
|
|
; SSSE3-NEXT: pminub %xmm3, %xmm0
|
|
; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_6_v16i8:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5]
|
|
; SSE41-NEXT: pminub %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_6_v16i8:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpminub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_6_v16i8:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpminub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_6_v16i8:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_6_v16i8:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6]
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_6_v16i8:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6]
; BITALG_NOVLX-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_6_v16i8:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpcmpltub {{.*}}(%rip), %xmm0, %k0
; BITALG-NEXT: vpmovm2b %k0, %xmm0
; BITALG-NEXT: retq
%2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
%3 = icmp ult <16 x i8> %2, <i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6>
%4 = sext <16 x i1> %3 to <16 x i8>
ret <16 x i8> %4
}

define <16 x i8> @ugt_6_v16i8(<16 x i8> %0) {
; SSE2-LABEL: ugt_6_v16i8:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pcmpgtb {{.*}}(%rip), %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_6_v16i8:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pcmpgtb {{.*}}(%rip), %xmm1
; SSE3-NEXT: movdqa %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_6_v16i8:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7]
; SSSE3-NEXT: pmaxub %xmm3, %xmm0
; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_6_v16i8:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7]
; SSE41-NEXT: pmaxub %xmm3, %xmm0
; SSE41-NEXT: pcmpeqb %xmm3, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_6_v16i8:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpmaxub {{.*}}(%rip), %xmm0, %xmm1
; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_6_v16i8:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpmaxub {{.*}}(%rip), %xmm0, %xmm1
; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_6_v16i8:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_6_v16i8:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_6_v16i8:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpcmpgtb {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_6_v16i8:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleub {{.*}}(%rip), %xmm0, %k0
; BITALG-NEXT: vpmovm2b %k0, %xmm0
; BITALG-NEXT: retq
%2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
%3 = icmp ugt <16 x i8> %2, <i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6>
%4 = sext <16 x i1> %3 to <16 x i8>
ret <16 x i8> %4
}

define <16 x i8> @ult_7_v16i8(<16 x i8> %0) {
|
|
; SSE2-LABEL: ult_7_v16i8:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7]
|
|
; SSE2-NEXT: pcmpgtb %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_7_v16i8:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7]
|
|
; SSE3-NEXT: pcmpgtb %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_7_v16i8:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6]
|
|
; SSSE3-NEXT: pminub %xmm3, %xmm0
|
|
; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_7_v16i8:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6]
|
|
; SSE41-NEXT: pminub %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpeqb %xmm3, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_7_v16i8:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpminub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_7_v16i8:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpminub {{.*}}(%rip), %xmm0, %xmm1
|
|
; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_7_v16i8:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_7_v16i8:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_7_v16i8:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_7_v16i8:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltub {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2b %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
|
|
%3 = icmp ult <16 x i8> %2, <i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7>
|
|
%4 = sext <16 x i1> %3 to <16 x i8>
|
|
ret <16 x i8> %4
|
|
}
define <8 x i16> @ugt_1_v8i16(<8 x i16> %0) {
|
|
; SSE-LABEL: ugt_1_v8i16:
|
|
; SSE: # %bb.0:
|
|
; SSE-NEXT: pcmpeqd %xmm2, %xmm2
|
|
; SSE-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE-NEXT: paddw %xmm2, %xmm1
|
|
; SSE-NEXT: pand %xmm0, %xmm1
|
|
; SSE-NEXT: pxor %xmm0, %xmm0
|
|
; SSE-NEXT: pcmpeqw %xmm0, %xmm1
|
|
; SSE-NEXT: pxor %xmm2, %xmm1
|
|
; SSE-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_1_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpaddw %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpcmpeqw %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_1_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpaddw %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm2, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpcmpeqw %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_1_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX512VPOPCNTDQ-NEXT: vpaddw %xmm1, %xmm0, %xmm1
|
|
; AVX512VPOPCNTDQ-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpeqw %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpternlogq $15, %zmm0, %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_1_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpaddw %xmm1, %xmm0, %xmm1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqw %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_1_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_1_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
define <8 x i16> @ult_2_v8i16(<8 x i16> %0) {
|
|
; SSE-LABEL: ult_2_v8i16:
|
|
; SSE: # %bb.0:
|
|
; SSE-NEXT: pcmpeqd %xmm1, %xmm1
|
|
; SSE-NEXT: paddw %xmm0, %xmm1
|
|
; SSE-NEXT: pand %xmm1, %xmm0
|
|
; SSE-NEXT: pxor %xmm1, %xmm1
|
|
; SSE-NEXT: pcmpeqw %xmm1, %xmm0
|
|
; SSE-NEXT: retq
|
|
;
|
|
; AVX-LABEL: ult_2_v8i16:
|
|
; AVX: # %bb.0:
|
|
; AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX-NEXT: vpaddw %xmm1, %xmm0, %xmm1
|
|
; AVX-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX-NEXT: vpcmpeqw %xmm1, %xmm0, %xmm0
|
|
; AVX-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_2_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_2_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
define <8 x i16> @ugt_2_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_2_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_2_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_2_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_2_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_2_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_2_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_2_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_2_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_2_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_2_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
define <8 x i16> @ult_3_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_3_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_3_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_3_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_3_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_3_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_3_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_3_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_3_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_3_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_3_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
define <8 x i16> @ugt_3_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_3_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_3_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_3_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_3_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_3_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_3_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_3_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_3_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_3_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_3_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
define <8 x i16> @ult_4_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_4_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_4_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_4_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_4_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_4_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_4_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_4_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_4_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_4_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_4_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
define <8 x i16> @ugt_4_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_4_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_4_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_4_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_4_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_4_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_4_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_4_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_4_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_4_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_4_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
define <8 x i16> @ult_5_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_5_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_5_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_5_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_5_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_5_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_5_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_5_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_5_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_5_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_5_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 5, i16 5, i16 5, i16 5, i16 5, i16 5, i16 5, i16 5>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ugt_5_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_5_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_5_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_5_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_5_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_5_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_5_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_5_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_5_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_5_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_5_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 5, i16 5, i16 5, i16 5, i16 5, i16 5, i16 5, i16 5>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ult_6_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_6_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_6_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_6_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_6_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_6_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_6_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_6_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_6_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_6_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_6_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 6, i16 6, i16 6, i16 6, i16 6, i16 6, i16 6, i16 6>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ugt_6_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_6_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_6_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_6_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_6_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_6_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_6_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_6_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_6_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_6_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_6_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 6, i16 6, i16 6, i16 6, i16 6, i16 6, i16 6, i16 6>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ult_7_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_7_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_7_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_7_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_7_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_7_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_7_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_7_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_7_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_7_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_7_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ugt_7_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_7_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_7_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_7_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_7_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_7_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_7_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_7_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_7_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_7_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_7_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ult_8_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_8_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [8,8,8,8,8,8,8,8]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_8_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [8,8,8,8,8,8,8,8]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_8_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [8,8,8,8,8,8,8,8]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_8_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [8,8,8,8,8,8,8,8]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_8_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_8_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_8_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_8_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_8_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_8_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ugt_8_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_8_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_8_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_8_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_8_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_8_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_8_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_8_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_8_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_8_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_8_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ult_9_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_9_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [9,9,9,9,9,9,9,9]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_9_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [9,9,9,9,9,9,9,9]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_9_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [9,9,9,9,9,9,9,9]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_9_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9,9,9,9,9,9,9,9]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_9_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9,9,9,9,9,9,9]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_9_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9,9,9,9,9,9,9]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_9_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9,9,9,9,9,9,9]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_9_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9,9,9,9,9,9,9]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_9_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9,9,9,9,9,9,9]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_9_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 9, i16 9, i16 9, i16 9, i16 9, i16 9, i16 9, i16 9>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ugt_9_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_9_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_9_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_9_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_9_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_9_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_9_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_9_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_9_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_9_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_9_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 9, i16 9, i16 9, i16 9, i16 9, i16 9, i16 9, i16 9>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ult_10_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_10_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [10,10,10,10,10,10,10,10]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_10_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [10,10,10,10,10,10,10,10]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_10_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [10,10,10,10,10,10,10,10]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_10_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [10,10,10,10,10,10,10,10]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_10_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10,10,10,10,10,10,10]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_10_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10,10,10,10,10,10,10]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_10_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10,10,10,10,10,10,10]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_10_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10,10,10,10,10,10,10]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_10_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10,10,10,10,10,10,10]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_10_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 10, i16 10, i16 10, i16 10, i16 10, i16 10, i16 10, i16 10>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ugt_10_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_10_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_10_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_10_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_10_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_10_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_10_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_10_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_10_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_10_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_10_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 10, i16 10, i16 10, i16 10, i16 10, i16 10, i16 10, i16 10>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ult_11_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_11_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [11,11,11,11,11,11,11,11]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_11_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [11,11,11,11,11,11,11,11]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_11_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [11,11,11,11,11,11,11,11]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_11_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [11,11,11,11,11,11,11,11]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_11_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11,11,11,11,11,11,11]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_11_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11,11,11,11,11,11,11]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_11_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11,11,11,11,11,11,11]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_11_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11,11,11,11,11,11,11]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_11_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11,11,11,11,11,11,11]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_11_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 11, i16 11, i16 11, i16 11, i16 11, i16 11, i16 11, i16 11>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ugt_11_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_11_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_11_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_11_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_11_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_11_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_11_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_11_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_11_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_11_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_11_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 11, i16 11, i16 11, i16 11, i16 11, i16 11, i16 11, i16 11>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ult_12_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_12_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [12,12,12,12,12,12,12,12]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_12_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [12,12,12,12,12,12,12,12]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_12_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [12,12,12,12,12,12,12,12]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_12_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [12,12,12,12,12,12,12,12]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_12_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12,12,12,12,12,12,12]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_12_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12,12,12,12,12,12,12]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_12_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12,12,12,12,12,12,12]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_12_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12,12,12,12,12,12,12]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_12_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12,12,12,12,12,12,12]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_12_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 12, i16 12, i16 12, i16 12, i16 12, i16 12, i16 12, i16 12>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ugt_12_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_12_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_12_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_12_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_12_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_12_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_12_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_12_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_12_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_12_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_12_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 12, i16 12, i16 12, i16 12, i16 12, i16 12, i16 12, i16 12>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ult_13_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ult_13_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: psllw $8, %xmm2
|
|
; SSE2-NEXT: paddb %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $8, %xmm2
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [13,13,13,13,13,13,13,13]
|
|
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_13_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: psllw $8, %xmm2
|
|
; SSE3-NEXT: paddb %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $8, %xmm2
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [13,13,13,13,13,13,13,13]
|
|
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_13_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: psllw $8, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm1
|
|
; SSSE3-NEXT: psrlw $8, %xmm1
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [13,13,13,13,13,13,13,13]
|
|
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_13_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm1
|
|
; SSE41-NEXT: psllw $8, %xmm1
|
|
; SSE41-NEXT: paddb %xmm3, %xmm1
|
|
; SSE41-NEXT: psrlw $8, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [13,13,13,13,13,13,13,13]
|
|
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_13_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13,13,13,13,13,13,13]
|
|
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_13_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13,13,13,13,13,13,13]
|
|
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_13_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13,13,13,13,13,13,13]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_13_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13,13,13,13,13,13,13]
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_13_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13,13,13,13,13,13,13]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_13_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ult <8 x i16> %2, <i16 13, i16 13, i16 13, i16 13, i16 13, i16 13, i16 13, i16 13>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ugt_13_v8i16(<8 x i16> %0) {
|
|
; SSE2-LABEL: ugt_13_v8i16:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: psllw $8, %xmm0
|
|
; SSE2-NEXT: paddb %xmm1, %xmm0
|
|
; SSE2-NEXT: psrlw $8, %xmm0
|
|
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_13_v8i16:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: psllw $8, %xmm0
|
|
; SSE3-NEXT: paddb %xmm1, %xmm0
|
|
; SSE3-NEXT: psrlw $8, %xmm0
|
|
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_13_v8i16:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm0
|
|
; SSSE3-NEXT: psllw $8, %xmm0
|
|
; SSSE3-NEXT: paddb %xmm3, %xmm0
|
|
; SSSE3-NEXT: psrlw $8, %xmm0
|
|
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_13_v8i16:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm0
|
|
; SSE41-NEXT: psllw $8, %xmm0
|
|
; SSE41-NEXT: paddb %xmm3, %xmm0
|
|
; SSE41-NEXT: psrlw $8, %xmm0
|
|
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_13_v8i16:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_13_v8i16:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_13_v8i16:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_13_v8i16:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_13_v8i16:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_13_v8i16:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
|
|
; BITALG-NEXT: vpmovm2w %k0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
|
|
%3 = icmp ugt <8 x i16> %2, <i16 13, i16 13, i16 13, i16 13, i16 13, i16 13, i16 13, i16 13>
|
|
%4 = sext <8 x i1> %3 to <8 x i16>
|
|
ret <8 x i16> %4
|
|
}
|
|
|
|
define <8 x i16> @ult_14_v8i16(<8 x i16> %0) {
; SSE2-LABEL: ult_14_v8i16:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: psllw $8, %xmm2
; SSE2-NEXT: paddb %xmm1, %xmm2
; SSE2-NEXT: psrlw $8, %xmm2
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [14,14,14,14,14,14,14,14]
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_14_v8i16:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: psllw $8, %xmm2
; SSE3-NEXT: paddb %xmm1, %xmm2
; SSE3-NEXT: psrlw $8, %xmm2
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [14,14,14,14,14,14,14,14]
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_14_v8i16:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: movdqa %xmm3, %xmm1
; SSSE3-NEXT: psllw $8, %xmm1
; SSSE3-NEXT: paddb %xmm3, %xmm1
; SSSE3-NEXT: psrlw $8, %xmm1
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [14,14,14,14,14,14,14,14]
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_14_v8i16:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: movdqa %xmm3, %xmm1
; SSE41-NEXT: psllw $8, %xmm1
; SSE41-NEXT: paddb %xmm3, %xmm1
; SSE41-NEXT: psrlw $8, %xmm1
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [14,14,14,14,14,14,14,14]
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_14_v8i16:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14,14,14,14,14,14,14]
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_14_v8i16:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14,14,14,14,14,14,14]
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_14_v8i16:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14,14,14,14,14,14,14]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_14_v8i16:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14,14,14,14,14,14,14]
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_14_v8i16:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14,14,14,14,14,14,14]
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_14_v8i16:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
; BITALG-NEXT: vpmovm2w %k0, %xmm0
; BITALG-NEXT: retq
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
%3 = icmp ult <8 x i16> %2, <i16 14, i16 14, i16 14, i16 14, i16 14, i16 14, i16 14, i16 14>
%4 = sext <8 x i1> %3 to <8 x i16>
ret <8 x i16> %4
}

define <8 x i16> @ugt_14_v8i16(<8 x i16> %0) {
; SSE2-LABEL: ugt_14_v8i16:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm0
; SSE2-NEXT: psllw $8, %xmm0
; SSE2-NEXT: paddb %xmm1, %xmm0
; SSE2-NEXT: psrlw $8, %xmm0
; SSE2-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_14_v8i16:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: movdqa %xmm1, %xmm0
; SSE3-NEXT: psllw $8, %xmm0
; SSE3-NEXT: paddb %xmm1, %xmm0
; SSE3-NEXT: psrlw $8, %xmm0
; SSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_14_v8i16:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: movdqa %xmm3, %xmm0
; SSSE3-NEXT: psllw $8, %xmm0
; SSSE3-NEXT: paddb %xmm3, %xmm0
; SSSE3-NEXT: psrlw $8, %xmm0
; SSSE3-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_14_v8i16:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: movdqa %xmm3, %xmm0
; SSE41-NEXT: psllw $8, %xmm0
; SSE41-NEXT: paddb %xmm3, %xmm0
; SSE41-NEXT: psrlw $8, %xmm0
; SSE41-NEXT: pcmpgtw {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_14_v8i16:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_14_v8i16:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
; AVX2-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_14_v8i16:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_14_v8i16:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_14_v8i16:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpcmpgtw {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_14_v8i16:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuw {{.*}}(%rip), %xmm0, %k0
; BITALG-NEXT: vpmovm2w %k0, %xmm0
; BITALG-NEXT: retq
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
%3 = icmp ugt <8 x i16> %2, <i16 14, i16 14, i16 14, i16 14, i16 14, i16 14, i16 14, i16 14>
%4 = sext <8 x i1> %3 to <8 x i16>
ret <8 x i16> %4
}

define <8 x i16> @ult_15_v8i16(<8 x i16> %0) {
; SSE2-LABEL: ult_15_v8i16:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: psllw $8, %xmm2
; SSE2-NEXT: paddb %xmm1, %xmm2
; SSE2-NEXT: psrlw $8, %xmm2
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [15,15,15,15,15,15,15,15]
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_15_v8i16:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: psllw $8, %xmm2
; SSE3-NEXT: paddb %xmm1, %xmm2
; SSE3-NEXT: psrlw $8, %xmm2
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [15,15,15,15,15,15,15,15]
; SSE3-NEXT: pcmpgtw %xmm2, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_15_v8i16:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: movdqa %xmm3, %xmm1
; SSSE3-NEXT: psllw $8, %xmm1
; SSSE3-NEXT: paddb %xmm3, %xmm1
; SSSE3-NEXT: psrlw $8, %xmm1
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [15,15,15,15,15,15,15,15]
; SSSE3-NEXT: pcmpgtw %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_15_v8i16:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: movdqa %xmm3, %xmm1
; SSE41-NEXT: psllw $8, %xmm1
; SSE41-NEXT: paddb %xmm3, %xmm1
; SSE41-NEXT: psrlw $8, %xmm1
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [15,15,15,15,15,15,15,15]
; SSE41-NEXT: pcmpgtw %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_15_v8i16:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1
; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0
; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_15_v8i16:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpsllw $8, %xmm0, %xmm1
; AVX2-NEXT: vpaddb %xmm0, %xmm1, %xmm0
; AVX2-NEXT: vpsrlw $8, %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_15_v8i16:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_15_v8i16:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15]
; AVX512VPOPCNTDQVL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vzeroupper
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_15_v8i16:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15]
; BITALG_NOVLX-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_15_v8i16:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntw %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuw {{.*}}(%rip), %xmm0, %k0
; BITALG-NEXT: vpmovm2w %k0, %xmm0
; BITALG-NEXT: retq
%2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0)
%3 = icmp ult <8 x i16> %2, <i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15>
%4 = sext <8 x i1> %3 to <8 x i16>
ret <8 x i16> %4
}

define <4 x i32> @ugt_1_v4i32(<4 x i32> %0) {
|
|
; SSE-LABEL: ugt_1_v4i32:
|
|
; SSE: # %bb.0:
|
|
; SSE-NEXT: pcmpeqd %xmm2, %xmm2
|
|
; SSE-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE-NEXT: paddd %xmm2, %xmm1
|
|
; SSE-NEXT: pand %xmm0, %xmm1
|
|
; SSE-NEXT: pxor %xmm0, %xmm0
|
|
; SSE-NEXT: pcmpeqd %xmm0, %xmm1
|
|
; SSE-NEXT: pxor %xmm2, %xmm1
|
|
; SSE-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_1_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpaddd %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpcmpeqd %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_1_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpaddd %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm2, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpcmpeqd %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_1_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [1,1,1,1]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_1_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_1_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpaddd %xmm1, %xmm0, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpternlogq $15, %zmm0, %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_1_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpaddd %xmm1, %xmm0, %xmm1
|
|
; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 1, i32 1, i32 1, i32 1>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ult_2_v4i32(<4 x i32> %0) {
|
|
; SSE-LABEL: ult_2_v4i32:
|
|
; SSE: # %bb.0:
|
|
; SSE-NEXT: pcmpeqd %xmm1, %xmm1
|
|
; SSE-NEXT: paddd %xmm0, %xmm1
|
|
; SSE-NEXT: pand %xmm1, %xmm0
|
|
; SSE-NEXT: pxor %xmm1, %xmm1
|
|
; SSE-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_2_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpaddd %xmm1, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_2_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpaddd %xmm1, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_2_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2,2,2,2]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_2_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_2_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpaddd %xmm1, %xmm0, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_2_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpaddd %xmm1, %xmm0, %xmm1
|
|
; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 2, i32 2, i32 2, i32 2>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ugt_2_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_2_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_2_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_2_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_2_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_2_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_2_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2,2,2,2]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_2_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2,2,2,2]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_2_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_2_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2,2,2,2]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_2_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 2, i32 2, i32 2, i32 2>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ult_3_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_3_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_3_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_3_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_3_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_3_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_3_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,3,3,3]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_3_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,3,3,3]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_3_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_3_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,3,3,3]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_3_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 3, i32 3, i32 3, i32 3>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ugt_3_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_3_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_3_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_3_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_3_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_3_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_3_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,3,3,3]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_3_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,3,3,3]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_3_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_3_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,3,3,3]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_3_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 3, i32 3, i32 3, i32 3>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ult_4_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_4_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_4_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_4_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_4_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_4_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_4_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_4_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_4_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_4_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_4_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 4, i32 4, i32 4, i32 4>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ugt_4_v4i32(<4 x i32> %0) {
; SSE2-LABEL: ugt_4_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE2-NEXT: psadbw %xmm0, %xmm2
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: psadbw %xmm0, %xmm1
; SSE2-NEXT: packuswb %xmm2, %xmm1
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_4_v4i32:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE3-NEXT: psadbw %xmm0, %xmm2
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE3-NEXT: psadbw %xmm0, %xmm1
; SSE3-NEXT: packuswb %xmm2, %xmm1
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSE3-NEXT: movdqa %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_4_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm3
; SSSE3-NEXT: pand %xmm2, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm1, %xmm4
; SSSE3-NEXT: pshufb %xmm3, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm2, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm1
; SSSE3-NEXT: paddb %xmm4, %xmm1
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSSE3-NEXT: psadbw %xmm0, %xmm2
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSSE3-NEXT: psadbw %xmm0, %xmm1
; SSSE3-NEXT: packuswb %xmm2, %xmm1
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSSE3-NEXT: movdqa %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_4_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm1
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; SSE41-NEXT: psadbw %xmm1, %xmm3
; SSE41-NEXT: psadbw %xmm1, %xmm0
; SSE41-NEXT: packuswb %xmm3, %xmm0
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_4_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_4_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_4_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_4_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_4_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_4_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
  %3 = icmp ugt <4 x i32> %2, <i32 4, i32 4, i32 4, i32 4>
  %4 = sext <4 x i1> %3 to <4 x i32>
  ret <4 x i32> %4
}
define <4 x i32> @ult_5_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_5_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_5_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_5_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_5_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_5_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_5_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,5,5,5]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_5_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,5,5,5]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_5_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_5_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,5,5,5]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_5_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 5, i32 5, i32 5, i32 5>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
define <4 x i32> @ugt_5_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_5_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_5_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_5_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_5_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_5_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_5_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,5,5,5]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_5_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,5,5,5]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_5_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_5_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,5,5,5]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_5_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 5, i32 5, i32 5, i32 5>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
define <4 x i32> @ult_6_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_6_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_6_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_6_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_6_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_6_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_6_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,6,6,6]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_6_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,6,6,6]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_6_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_6_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,6,6,6]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_6_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 6, i32 6, i32 6, i32 6>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
define <4 x i32> @ugt_6_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_6_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_6_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_6_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_6_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_6_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_6_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,6,6,6]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_6_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,6,6,6]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_6_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_6_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,6,6,6]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_6_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 6, i32 6, i32 6, i32 6>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
define <4 x i32> @ult_7_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_7_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_7_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_7_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_7_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_7_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_7_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,7,7,7]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_7_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,7,7,7]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_7_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_7_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,7,7,7]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_7_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 7, i32 7, i32 7, i32 7>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
define <4 x i32> @ugt_7_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_7_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_7_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_7_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_7_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_7_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_7_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,7,7,7]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_7_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,7,7,7]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_7_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_7_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,7,7,7]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_7_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 7, i32 7, i32 7, i32 7>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
define <4 x i32> @ult_8_v4i32(<4 x i32> %0) {
; SSE2-LABEL: ult_8_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE2-NEXT: psadbw %xmm0, %xmm2
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: psadbw %xmm0, %xmm1
; SSE2-NEXT: packuswb %xmm2, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [8,8,8,8]
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_8_v4i32:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE3-NEXT: psadbw %xmm0, %xmm2
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE3-NEXT: psadbw %xmm0, %xmm1
; SSE3-NEXT: packuswb %xmm2, %xmm1
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [8,8,8,8]
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_8_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: movdqa %xmm3, %xmm1
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; SSSE3-NEXT: psadbw %xmm0, %xmm1
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
; SSSE3-NEXT: psadbw %xmm0, %xmm3
; SSSE3-NEXT: packuswb %xmm1, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [8,8,8,8]
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_8_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
; SSE41-NEXT: psadbw %xmm0, %xmm3
; SSE41-NEXT: psadbw %xmm0, %xmm1
; SSE41-NEXT: packuswb %xmm3, %xmm1
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [8,8,8,8]
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_8_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8,8,8]
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_8_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8,8,8,8]
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_8_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8,8,8,8]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_8_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_8_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8,8,8,8]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_8_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 8, i32 8, i32 8, i32 8>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ugt_8_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_8_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_8_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_8_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_8_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_8_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_8_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8,8,8,8]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_8_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8,8,8,8]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_8_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_8_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8,8,8,8]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_8_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 8, i32 8, i32 8, i32 8>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}

define <4 x i32> @ult_9_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_9_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [9,9,9,9]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_9_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [9,9,9,9]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_9_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [9,9,9,9]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_9_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9,9,9,9]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_9_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9,9,9]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_9_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [9,9,9,9]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_9_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [9,9,9,9]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_9_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_9_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [9,9,9,9]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_9_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 9, i32 9, i32 9, i32 9>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}

define <4 x i32> @ugt_9_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_9_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_9_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_9_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_9_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_9_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_9_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [9,9,9,9]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_9_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [9,9,9,9]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_9_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_9_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [9,9,9,9]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_9_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 9, i32 9, i32 9, i32 9>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}

define <4 x i32> @ult_10_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_10_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [10,10,10,10]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_10_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [10,10,10,10]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_10_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [10,10,10,10]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_10_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [10,10,10,10]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_10_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10,10,10]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_10_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [10,10,10,10]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_10_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [10,10,10,10]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_10_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_10_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [10,10,10,10]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_10_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 10, i32 10, i32 10, i32 10>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}

define <4 x i32> @ugt_10_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_10_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_10_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_10_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_10_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_10_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_10_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [10,10,10,10]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_10_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [10,10,10,10]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_10_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_10_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [10,10,10,10]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_10_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 10, i32 10, i32 10, i32 10>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}

define <4 x i32> @ult_11_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_11_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [11,11,11,11]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_11_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [11,11,11,11]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_11_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [11,11,11,11]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_11_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [11,11,11,11]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_11_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11,11,11]
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_11_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [11,11,11,11]
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_11_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [11,11,11,11]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_11_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_11_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [11,11,11,11]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_11_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 11, i32 11, i32 11, i32 11>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ugt_11_v4i32(<4 x i32> %0) {
; SSE2-LABEL: ugt_11_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE2-NEXT: psadbw %xmm0, %xmm2
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: psadbw %xmm0, %xmm1
; SSE2-NEXT: packuswb %xmm2, %xmm1
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_11_v4i32:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE3-NEXT: psadbw %xmm0, %xmm2
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE3-NEXT: psadbw %xmm0, %xmm1
; SSE3-NEXT: packuswb %xmm2, %xmm1
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSE3-NEXT: movdqa %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_11_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm3
; SSSE3-NEXT: pand %xmm2, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm1, %xmm4
; SSSE3-NEXT: pshufb %xmm3, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm2, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm1
; SSSE3-NEXT: paddb %xmm4, %xmm1
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSSE3-NEXT: psadbw %xmm0, %xmm2
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSSE3-NEXT: psadbw %xmm0, %xmm1
; SSSE3-NEXT: packuswb %xmm2, %xmm1
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSSE3-NEXT: movdqa %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_11_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm1
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; SSE41-NEXT: psadbw %xmm1, %xmm3
; SSE41-NEXT: psadbw %xmm1, %xmm0
; SSE41-NEXT: packuswb %xmm3, %xmm0
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_11_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_11_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [11,11,11,11]
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_11_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [11,11,11,11]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_11_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_11_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [11,11,11,11]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_11_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 11, i32 11, i32 11, i32 11>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_12_v4i32(<4 x i32> %0) {
; SSE2-LABEL: ult_12_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE2-NEXT: psadbw %xmm0, %xmm2
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: psadbw %xmm0, %xmm1
; SSE2-NEXT: packuswb %xmm2, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [12,12,12,12]
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_12_v4i32:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE3-NEXT: psadbw %xmm0, %xmm2
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE3-NEXT: psadbw %xmm0, %xmm1
; SSE3-NEXT: packuswb %xmm2, %xmm1
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [12,12,12,12]
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_12_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: movdqa %xmm3, %xmm1
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; SSSE3-NEXT: psadbw %xmm0, %xmm1
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
; SSSE3-NEXT: psadbw %xmm0, %xmm3
; SSSE3-NEXT: packuswb %xmm1, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [12,12,12,12]
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_12_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
; SSE41-NEXT: psadbw %xmm0, %xmm3
; SSE41-NEXT: psadbw %xmm0, %xmm1
; SSE41-NEXT: packuswb %xmm3, %xmm1
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [12,12,12,12]
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_12_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12,12,12]
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_12_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [12,12,12,12]
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_12_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [12,12,12,12]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_12_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_12_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [12,12,12,12]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_12_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 12, i32 12, i32 12, i32 12>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ugt_12_v4i32(<4 x i32> %0) {
; SSE2-LABEL: ugt_12_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE2-NEXT: psadbw %xmm0, %xmm2
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: psadbw %xmm0, %xmm1
; SSE2-NEXT: packuswb %xmm2, %xmm1
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_12_v4i32:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE3-NEXT: psadbw %xmm0, %xmm2
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE3-NEXT: psadbw %xmm0, %xmm1
; SSE3-NEXT: packuswb %xmm2, %xmm1
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSE3-NEXT: movdqa %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_12_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm3
; SSSE3-NEXT: pand %xmm2, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm1, %xmm4
; SSSE3-NEXT: pshufb %xmm3, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm2, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm1
; SSSE3-NEXT: paddb %xmm4, %xmm1
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSSE3-NEXT: psadbw %xmm0, %xmm2
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSSE3-NEXT: psadbw %xmm0, %xmm1
; SSSE3-NEXT: packuswb %xmm2, %xmm1
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSSE3-NEXT: movdqa %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_12_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm1
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; SSE41-NEXT: psadbw %xmm1, %xmm3
; SSE41-NEXT: psadbw %xmm1, %xmm0
; SSE41-NEXT: packuswb %xmm3, %xmm0
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_12_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_12_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [12,12,12,12]
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_12_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [12,12,12,12]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_12_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_12_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [12,12,12,12]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_12_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 12, i32 12, i32 12, i32 12>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_13_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_13_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [13,13,13,13]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_13_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [13,13,13,13]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_13_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [13,13,13,13]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_13_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [13,13,13,13]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_13_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13,13,13]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_13_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [13,13,13,13]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_13_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [13,13,13,13]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_13_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_13_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [13,13,13,13]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_13_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 13, i32 13, i32 13, i32 13>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
define <4 x i32> @ugt_13_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_13_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_13_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_13_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_13_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_13_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_13_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [13,13,13,13]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_13_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [13,13,13,13]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_13_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_13_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [13,13,13,13]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_13_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 13, i32 13, i32 13, i32 13>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
define <4 x i32> @ult_14_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_14_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [14,14,14,14]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_14_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [14,14,14,14]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_14_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [14,14,14,14]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_14_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [14,14,14,14]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_14_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14,14,14]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_14_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [14,14,14,14]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_14_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [14,14,14,14]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_14_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_14_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [14,14,14,14]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_14_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 14, i32 14, i32 14, i32 14>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
define <4 x i32> @ugt_14_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_14_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_14_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_14_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_14_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
; SSE41-NEXT: psadbw %xmm1, %xmm0
; SSE41-NEXT: packuswb %xmm3, %xmm0
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_14_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_14_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [14,14,14,14]
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_14_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [14,14,14,14]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_14_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_14_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [14,14,14,14]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_14_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 14, i32 14, i32 14, i32 14>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_15_v4i32(<4 x i32> %0) {
; SSE2-LABEL: ult_15_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE2-NEXT: psadbw %xmm0, %xmm2
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: psadbw %xmm0, %xmm1
; SSE2-NEXT: packuswb %xmm2, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [15,15,15,15]
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_15_v4i32:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE3-NEXT: psadbw %xmm0, %xmm2
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE3-NEXT: psadbw %xmm0, %xmm1
; SSE3-NEXT: packuswb %xmm2, %xmm1
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [15,15,15,15]
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_15_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: movdqa %xmm3, %xmm1
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; SSSE3-NEXT: psadbw %xmm0, %xmm1
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
; SSSE3-NEXT: psadbw %xmm0, %xmm3
; SSSE3-NEXT: packuswb %xmm1, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [15,15,15,15]
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_15_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
; SSE41-NEXT: psadbw %xmm0, %xmm3
; SSE41-NEXT: psadbw %xmm0, %xmm1
; SSE41-NEXT: packuswb %xmm3, %xmm1
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [15,15,15,15]
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_15_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15]
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_15_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_15_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_15_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_15_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_15_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 15, i32 15, i32 15, i32 15>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ugt_15_v4i32(<4 x i32> %0) {
; SSE2-LABEL: ugt_15_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE2-NEXT: psadbw %xmm0, %xmm2
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: psadbw %xmm0, %xmm1
; SSE2-NEXT: packuswb %xmm2, %xmm1
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_15_v4i32:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE3-NEXT: psadbw %xmm0, %xmm2
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE3-NEXT: psadbw %xmm0, %xmm1
; SSE3-NEXT: packuswb %xmm2, %xmm1
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSE3-NEXT: movdqa %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_15_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm3
; SSSE3-NEXT: pand %xmm2, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm1, %xmm4
; SSSE3-NEXT: pshufb %xmm3, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm2, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm1
; SSSE3-NEXT: paddb %xmm4, %xmm1
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSSE3-NEXT: psadbw %xmm0, %xmm2
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSSE3-NEXT: psadbw %xmm0, %xmm1
; SSSE3-NEXT: packuswb %xmm2, %xmm1
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSSE3-NEXT: movdqa %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_15_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm1
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; SSE41-NEXT: psadbw %xmm1, %xmm3
; SSE41-NEXT: psadbw %xmm1, %xmm0
; SSE41-NEXT: packuswb %xmm3, %xmm0
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_15_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_15_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_15_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_15_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_15_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_15_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 15, i32 15, i32 15, i32 15>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_16_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_16_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [16,16,16,16]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_16_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [16,16,16,16]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_16_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [16,16,16,16]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_16_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [16,16,16,16]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_16_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [16,16,16,16]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_16_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16,16,16,16]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_16_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16,16,16,16]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_16_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_16_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16,16,16,16]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_16_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 16, i32 16, i32 16, i32 16>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ugt_16_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_16_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_16_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_16_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_16_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_16_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_16_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16,16,16,16]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_16_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16,16,16,16]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_16_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_16_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16,16,16,16]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_16_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 16, i32 16, i32 16, i32 16>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ult_17_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_17_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [17,17,17,17]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_17_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [17,17,17,17]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_17_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [17,17,17,17]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_17_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [17,17,17,17]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_17_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [17,17,17,17]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_17_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [17,17,17,17]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_17_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [17,17,17,17]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_17_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_17_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [17,17,17,17]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_17_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 17, i32 17, i32 17, i32 17>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ugt_17_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_17_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_17_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_17_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_17_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_17_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_17_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [17,17,17,17]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_17_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [17,17,17,17]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_17_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_17_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [17,17,17,17]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_17_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 17, i32 17, i32 17, i32 17>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ult_18_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_18_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [18,18,18,18]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_18_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [18,18,18,18]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_18_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [18,18,18,18]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_18_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [18,18,18,18]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_18_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [18,18,18,18]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_18_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [18,18,18,18]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_18_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [18,18,18,18]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_18_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_18_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [18,18,18,18]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_18_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
  %3 = icmp ult <4 x i32> %2, <i32 18, i32 18, i32 18, i32 18>
  %4 = sext <4 x i1> %3 to <4 x i32>
  ret <4 x i32> %4
}
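; The remaining v4i32 tests below all exercise the same lowering that the
; checks above establish: without a native vector popcount, each byte's count
; is produced either by the shift-and-mask bit-twiddling sequence (SSE2/SSE3)
; or by a pshufb nibble lookup (SSSE3 and later), the bytes are then summed
; into 32-bit lanes with psadbw against zero, and the threshold test is a
; signed pcmpgtd against a splatted constant, which is safe because a 32-bit
; popcount can never exceed 32.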
define <4 x i32> @ugt_18_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_18_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_18_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_18_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_18_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_18_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_18_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [18,18,18,18]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_18_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [18,18,18,18]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_18_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_18_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [18,18,18,18]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_18_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
  %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
  %3 = icmp ugt <4 x i32> %2, <i32 18, i32 18, i32 18, i32 18>
  %4 = sext <4 x i1> %3 to <4 x i32>
  ret <4 x i32> %4
}
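; The ugt_N variants differ from the ult_N variants only in the direction of
; the final compare: the splat constant moves to the other pcmpgtd operand
; (popcount > N instead of N > popcount), and on the VL targets vpcmpnleud
; replaces vpcmpltud before the mask is materialized with vmovdqa32 {z}.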
define <4 x i32> @ult_19_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_19_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [19,19,19,19]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_19_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [19,19,19,19]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_19_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [19,19,19,19]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_19_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [19,19,19,19]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_19_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [19,19,19,19]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_19_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [19,19,19,19]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_19_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [19,19,19,19]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_19_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_19_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [19,19,19,19]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_19_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
  %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
  %3 = icmp ult <4 x i32> %2, <i32 19, i32 19, i32 19, i32 19>
  %4 = sext <4 x i1> %3 to <4 x i32>
  ret <4 x i32> %4
}
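; Note that the SSE41 versions load the low dword pair with pmovzxdq instead
; of the punpckldq-with-zero used by SSE2/SSE3/SSSE3; both feed the same
; psadbw reduction.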
define <4 x i32> @ugt_19_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_19_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_19_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_19_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_19_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_19_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_19_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [19,19,19,19]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_19_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [19,19,19,19]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_19_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_19_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [19,19,19,19]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_19_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
  %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
  %3 = icmp ugt <4 x i32> %2, <i32 19, i32 19, i32 19, i32 19>
  %4 = sext <4 x i1> %3 to <4 x i32>
  ret <4 x i32> %4
}
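; Without AVX512VL, the AVX512VPOPCNTDQ and BITALG_NOVLX variants operate on
; the full zmm register (hence the "# kill" comment widening xmm0 to zmm0 and
; the trailing vzeroupper), while the VL variants stay in xmm registers.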
define <4 x i32> @ult_20_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_20_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [20,20,20,20]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_20_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [20,20,20,20]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_20_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [20,20,20,20]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_20_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [20,20,20,20]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_20_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [20,20,20,20]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_20_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [20,20,20,20]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_20_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [20,20,20,20]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_20_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_20_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [20,20,20,20]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_20_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
  %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
  %3 = icmp ult <4 x i32> %2, <i32 20, i32 20, i32 20, i32 20>
  %4 = sext <4 x i1> %3 to <4 x i32>
  ret <4 x i32> %4
}
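; BITALG only adds byte/word popcount (vpopcntb), so the dword counts are
; still formed with the psadbw-against-zero reduction before the compare.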
define <4 x i32> @ugt_20_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_20_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_20_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_20_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_20_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_20_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_20_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [20,20,20,20]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_20_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [20,20,20,20]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_20_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_20_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [20,20,20,20]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_20_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
  %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
  %3 = icmp ugt <4 x i32> %2, <i32 20, i32 20, i32 20, i32 20>
  %4 = sext <4 x i1> %3 to <4 x i32>
  ret <4 x i32> %4
}
define <4 x i32> @ult_21_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_21_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [21,21,21,21]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_21_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [21,21,21,21]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_21_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [21,21,21,21]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_21_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [21,21,21,21]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_21_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [21,21,21,21]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_21_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [21,21,21,21]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_21_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [21,21,21,21]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_21_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_21_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [21,21,21,21]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_21_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
  %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
  %3 = icmp ult <4 x i32> %2, <i32 21, i32 21, i32 21, i32 21>
  %4 = sext <4 x i1> %3 to <4 x i32>
  ret <4 x i32> %4
}
define <4 x i32> @ugt_21_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_21_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_21_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_21_v4i32:
|
|
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm3
; SSSE3-NEXT: pand %xmm2, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm1, %xmm4
; SSSE3-NEXT: pshufb %xmm3, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm2, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm1
; SSSE3-NEXT: paddb %xmm4, %xmm1
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSSE3-NEXT: psadbw %xmm0, %xmm2
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSSE3-NEXT: psadbw %xmm0, %xmm1
; SSSE3-NEXT: packuswb %xmm2, %xmm1
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSSE3-NEXT: movdqa %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_21_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm1
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; SSE41-NEXT: psadbw %xmm1, %xmm3
; SSE41-NEXT: psadbw %xmm1, %xmm0
; SSE41-NEXT: packuswb %xmm3, %xmm0
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_21_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_21_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [21,21,21,21]
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_21_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [21,21,21,21]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_21_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_21_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [21,21,21,21]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_21_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 21, i32 21, i32 21, i32 21>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_22_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_22_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [22,22,22,22]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_22_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [22,22,22,22]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_22_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [22,22,22,22]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_22_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [22,22,22,22]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_22_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [22,22,22,22]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_22_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [22,22,22,22]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_22_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [22,22,22,22]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_22_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_22_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [22,22,22,22]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_22_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 22, i32 22, i32 22, i32 22>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ugt_22_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_22_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_22_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_22_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_22_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_22_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_22_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [22,22,22,22]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_22_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [22,22,22,22]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_22_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_22_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [22,22,22,22]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_22_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 22, i32 22, i32 22, i32 22>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_23_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_23_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [23,23,23,23]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_23_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [23,23,23,23]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_23_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [23,23,23,23]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_23_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [23,23,23,23]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_23_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [23,23,23,23]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_23_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [23,23,23,23]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_23_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [23,23,23,23]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_23_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_23_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [23,23,23,23]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_23_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 23, i32 23, i32 23, i32 23>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ugt_23_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_23_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_23_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_23_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_23_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_23_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_23_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [23,23,23,23]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_23_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [23,23,23,23]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_23_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_23_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [23,23,23,23]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_23_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 23, i32 23, i32 23, i32 23>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_24_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_24_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [24,24,24,24]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_24_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [24,24,24,24]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_24_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [24,24,24,24]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_24_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [24,24,24,24]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_24_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [24,24,24,24]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_24_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [24,24,24,24]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_24_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [24,24,24,24]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_24_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_24_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [24,24,24,24]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_24_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 24, i32 24, i32 24, i32 24>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ugt_24_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_24_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_24_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_24_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_24_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_24_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_24_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [24,24,24,24]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_24_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [24,24,24,24]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_24_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_24_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [24,24,24,24]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_24_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 24, i32 24, i32 24, i32 24>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_25_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_25_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [25,25,25,25]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_25_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [25,25,25,25]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_25_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [25,25,25,25]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_25_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
; SSE41-NEXT: psadbw %xmm0, %xmm3
; SSE41-NEXT: psadbw %xmm0, %xmm1
; SSE41-NEXT: packuswb %xmm3, %xmm1
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [25,25,25,25]
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_25_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [25,25,25,25]
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_25_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [25,25,25,25]
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_25_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [25,25,25,25]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_25_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_25_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [25,25,25,25]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_25_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 25, i32 25, i32 25, i32 25>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ugt_25_v4i32(<4 x i32> %0) {
; SSE2-LABEL: ugt_25_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE2-NEXT: psadbw %xmm0, %xmm2
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE2-NEXT: psadbw %xmm0, %xmm1
; SSE2-NEXT: packuswb %xmm2, %xmm1
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_25_v4i32:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSE3-NEXT: psadbw %xmm0, %xmm2
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE3-NEXT: psadbw %xmm0, %xmm1
; SSE3-NEXT: packuswb %xmm2, %xmm1
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSE3-NEXT: movdqa %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_25_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm3
; SSSE3-NEXT: pand %xmm2, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm1, %xmm4
; SSSE3-NEXT: pshufb %xmm3, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm2, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm1
; SSSE3-NEXT: paddb %xmm4, %xmm1
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; SSSE3-NEXT: psadbw %xmm0, %xmm2
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSSE3-NEXT: psadbw %xmm0, %xmm1
; SSSE3-NEXT: packuswb %xmm2, %xmm1
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
; SSSE3-NEXT: movdqa %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_25_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm1
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; SSE41-NEXT: psadbw %xmm1, %xmm3
; SSE41-NEXT: psadbw %xmm1, %xmm0
; SSE41-NEXT: packuswb %xmm3, %xmm0
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_25_v4i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_25_v4i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [25,25,25,25]
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_25_v4i32:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [25,25,25,25]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_25_v4i32:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_25_v4i32:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [25,25,25,25]
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_25_v4i32:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 25, i32 25, i32 25, i32 25>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_26_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_26_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [26,26,26,26]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_26_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [26,26,26,26]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_26_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [26,26,26,26]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_26_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [26,26,26,26]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_26_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [26,26,26,26]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_26_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [26,26,26,26]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_26_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [26,26,26,26]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_26_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_26_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [26,26,26,26]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_26_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 26, i32 26, i32 26, i32 26>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ugt_26_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_26_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_26_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_26_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_26_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_26_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_26_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [26,26,26,26]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_26_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [26,26,26,26]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_26_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_26_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [26,26,26,26]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_26_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 26, i32 26, i32 26, i32 26>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ult_27_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_27_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [27,27,27,27]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_27_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [27,27,27,27]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_27_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [27,27,27,27]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_27_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [27,27,27,27]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_27_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [27,27,27,27]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_27_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [27,27,27,27]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_27_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [27,27,27,27]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_27_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_27_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [27,27,27,27]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_27_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 27, i32 27, i32 27, i32 27>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ugt_27_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_27_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_27_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_27_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_27_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_27_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_27_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [27,27,27,27]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_27_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [27,27,27,27]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_27_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_27_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [27,27,27,27]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_27_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ugt <4 x i32> %2, <i32 27, i32 27, i32 27, i32 27>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ult_28_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_28_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [28,28,28,28]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_28_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [28,28,28,28]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_28_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [28,28,28,28]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_28_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [28,28,28,28]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_28_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [28,28,28,28]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_28_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [28,28,28,28]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_28_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [28,28,28,28]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_28_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_28_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [28,28,28,28]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_28_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
|
|
%3 = icmp ult <4 x i32> %2, <i32 28, i32 28, i32 28, i32 28>
|
|
%4 = sext <4 x i1> %3 to <4 x i32>
|
|
ret <4 x i32> %4
|
|
}
|
|
|
|
define <4 x i32> @ugt_28_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_28_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_28_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_28_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_28_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_28_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_28_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [28,28,28,28]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_28_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [28,28,28,28]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_28_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_28_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [28,28,28,28]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_28_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 28, i32 28, i32 28, i32 28>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

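; The v4i32 ugt_N/ult_N tests that follow all share one shape: form a
; per-element population count, then compare it against the splatted
; threshold N (elided as a {{.*}}(%rip) constant in several of the checks).
; Without a native popcount the count comes from the classic byte-wise
; bit-twiddling sequence (SSE2/SSE3) or a pshufb nibble lookup
; (SSSE3/SSE4.1/AVX); psadbw against zero then sums the byte counts into
; 64-bit halves, and punpck{l,h}dq + packuswb narrow them back to one count
; per 32-bit element. The final threshold check is a signed pcmpgtd/vpcmpgtd,
; which is safe here because the counts never exceed 32; the VL and BITALG
; targets instead use vpcmpnleud/vpcmpltud mask compares, and
; AVX512VPOPCNTDQ uses vpopcntd directly while the BITALG variants use
; vpopcntb followed by the same psadbw reduction.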
define <4 x i32> @ult_29_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_29_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [29,29,29,29]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_29_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [29,29,29,29]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_29_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [29,29,29,29]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_29_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [29,29,29,29]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_29_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [29,29,29,29]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_29_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [29,29,29,29]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_29_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [29,29,29,29]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_29_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_29_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [29,29,29,29]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_29_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 29, i32 29, i32 29, i32 29>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ugt_29_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_29_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_29_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_29_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_29_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_29_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_29_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [29,29,29,29]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_29_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [29,29,29,29]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_29_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_29_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [29,29,29,29]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_29_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 29, i32 29, i32 29, i32 29>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_30_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_30_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [30,30,30,30]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_30_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [30,30,30,30]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_30_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [30,30,30,30]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_30_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [30,30,30,30]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_30_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [30,30,30,30]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_30_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [30,30,30,30]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_30_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [30,30,30,30]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_30_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_30_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [30,30,30,30]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_30_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 30, i32 30, i32 30, i32 30>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ugt_30_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ugt_30_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_30_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_30_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm3
|
|
; SSSE3-NEXT: pand %xmm2, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm3, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm1
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSSE3-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_30_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm0
|
|
; SSE41-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_30_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtd {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_30_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [30,30,30,30]
|
|
; AVX2-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_30_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [30,30,30,30]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_30_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_30_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [30,30,30,30]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_30_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ugt <4 x i32> %2, <i32 30, i32 30, i32 30, i32 30>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

define <4 x i32> @ult_31_v4i32(<4 x i32> %0) {
|
|
; SSE2-LABEL: ult_31_v4i32:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE2-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE2-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [31,31,31,31]
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_31_v4i32:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm2
|
|
; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
|
|
; SSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE3-NEXT: packuswb %xmm2, %xmm1
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [31,31,31,31]
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_31_v4i32:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm1
|
|
; SSSE3-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm0[2],xmm1[3],xmm0[3]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm1
|
|
; SSSE3-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
|
|
; SSSE3-NEXT: psadbw %xmm0, %xmm3
|
|
; SSSE3-NEXT: packuswb %xmm1, %xmm3
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [31,31,31,31]
|
|
; SSSE3-NEXT: pcmpgtd %xmm3, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_31_v4i32:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm3[0],zero,xmm3[1],zero
|
|
; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm0[2],xmm3[3],xmm0[3]
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm3
|
|
; SSE41-NEXT: psadbw %xmm0, %xmm1
|
|
; SSE41-NEXT: packuswb %xmm3, %xmm1
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [31,31,31,31]
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_31_v4i32:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [31,31,31,31]
|
|
; AVX1-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_31_v4i32:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [31,31,31,31]
|
|
; AVX2-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_31_v4i32:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpbroadcastd {{.*#+}} xmm1 = [31,31,31,31]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_31_v4i32:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_31_v4i32:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [31,31,31,31]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_31_v4i32:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
|
|
; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltud {{.*}}(%rip){1to4}, %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
%2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
%3 = icmp ult <4 x i32> %2, <i32 31, i32 31, i32 31, i32 31>
%4 = sext <4 x i1> %3 to <4 x i32>
ret <4 x i32> %4
}

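; For the v2i64 tests with thresholds 1 and 2, most targets never materialise
; a popcount at all: popcount(x) > 1 is equivalent to (x & (x - 1)) != 0 and
; popcount(x) < 2 is equivalent to (x & (x - 1)) == 0, so ugt_1/ult_2 lower to
; paddq/pand with all-ones followed by a 64-bit compare against zero
; (synthesised from pcmpeqd + pshufd on targets without pcmpeqq). Only the
; AVX512VPOPCNTDQ variants fall back to a real vpopcntq plus compare.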
define <2 x i64> @ugt_1_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_1_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm1
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: paddq %xmm1, %xmm2
|
|
; SSE2-NEXT: pand %xmm0, %xmm2
|
|
; SSE2-NEXT: pxor %xmm3, %xmm3
|
|
; SSE2-NEXT: pcmpeqd %xmm2, %xmm3
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,0,3,2]
|
|
; SSE2-NEXT: pand %xmm3, %xmm0
|
|
; SSE2-NEXT: pxor %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_1_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm1
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: paddq %xmm1, %xmm2
|
|
; SSE3-NEXT: pand %xmm0, %xmm2
|
|
; SSE3-NEXT: pxor %xmm3, %xmm3
|
|
; SSE3-NEXT: pcmpeqd %xmm2, %xmm3
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,0,3,2]
|
|
; SSE3-NEXT: pand %xmm3, %xmm0
|
|
; SSE3-NEXT: pxor %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_1_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm1
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: paddq %xmm1, %xmm2
|
|
; SSSE3-NEXT: pand %xmm0, %xmm2
|
|
; SSSE3-NEXT: pxor %xmm3, %xmm3
|
|
; SSSE3-NEXT: pcmpeqd %xmm2, %xmm3
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,0,3,2]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm0
|
|
; SSSE3-NEXT: pxor %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_1_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE41-NEXT: paddq %xmm2, %xmm1
|
|
; SSE41-NEXT: pand %xmm0, %xmm1
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: pcmpeqq %xmm0, %xmm1
|
|
; SSE41-NEXT: pxor %xmm2, %xmm1
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_1_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpaddq %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
|
|
; AVX1-NEXT: vpcmpeqq %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_1_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpaddq %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm2, %xmm2, %xmm2
|
|
; AVX2-NEXT: vpcmpeqq %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_1_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_1_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_1_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpaddq %xmm1, %xmm0, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpternlogq $15, %zmm0, %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_1_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpaddq %xmm1, %xmm0, %xmm1
|
|
; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 1, i64 1>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ult_2_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_2_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm1
|
|
; SSE2-NEXT: paddq %xmm0, %xmm1
|
|
; SSE2-NEXT: pand %xmm0, %xmm1
|
|
; SSE2-NEXT: pxor %xmm2, %xmm2
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,0,3,2]
|
|
; SSE2-NEXT: pand %xmm2, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_2_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm1
|
|
; SSE3-NEXT: paddq %xmm0, %xmm1
|
|
; SSE3-NEXT: pand %xmm0, %xmm1
|
|
; SSE3-NEXT: pxor %xmm2, %xmm2
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,0,3,2]
|
|
; SSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_2_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm1
|
|
; SSSE3-NEXT: paddq %xmm0, %xmm1
|
|
; SSSE3-NEXT: pand %xmm0, %xmm1
|
|
; SSSE3-NEXT: pxor %xmm2, %xmm2
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,0,3,2]
|
|
; SSSE3-NEXT: pand %xmm2, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_2_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm1
|
|
; SSE41-NEXT: paddq %xmm0, %xmm1
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pxor %xmm1, %xmm1
|
|
; SSE41-NEXT: pcmpeqq %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_2_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpaddq %xmm1, %xmm0, %xmm1
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_2_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpaddq %xmm1, %xmm0, %xmm1
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_2_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [2,2]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_2_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_2_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpaddq %xmm1, %xmm0, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_2_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpaddq %xmm1, %xmm0, %xmm1
|
|
; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 2, i64 2>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

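; From a threshold of 2 upwards the v2i64 tests need the real count: the same
; byte-wise popcount as the v4i32 cases feeds a single psadbw against zero,
; leaving one 64-bit count per element. Pre-AVX targets have no 64-bit
; unsigned (or, before SSE4.2, even signed) vector compare, so the SSE paths
; appear to bias the operands (por with a hidden splat constant) and emulate
; the comparison with a signed pcmpgtd/pcmpeqd/pshufd ladder, while the
; AVX1/AVX2 paths can use vpcmpgtq directly since the counts never exceed 64.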
define <2 x i64> @ugt_2_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_2_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483650,2147483650]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_2_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483650,2147483650]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_2_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483650,2147483650]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_2_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483650,2147483650]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_2_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_2_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_2_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_2_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_2_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_2_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ugt <2 x i64> %2, <i64 2, i64 2>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ult_3_v2i64(<2 x i64> %0) {
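; All-ones in each i64 lane where ctpop(x) u< 3, zero elsewhere.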
|
|
; SSE2-LABEL: ult_3_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483651,2147483651]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_3_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483651,2147483651]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_3_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483651,2147483651]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_3_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483651,2147483651]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_3_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_3_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_3_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_3_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_3_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_3_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 3, i64 3>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ugt_3_v2i64(<2 x i64> %0) {
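; All-ones in each i64 lane where ctpop(x) u> 3, zero elsewhere.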
|
|
; SSE2-LABEL: ugt_3_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483651,2147483651]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_3_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483651,2147483651]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_3_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483651,2147483651]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_3_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483651,2147483651]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_3_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_3_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_3_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_3_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_3_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_3_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ugt <2 x i64> %2, <i64 3, i64 3>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ult_4_v2i64(<2 x i64> %0) {
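; All-ones in each i64 lane where ctpop(x) u< 4, zero elsewhere.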
|
|
; SSE2-LABEL: ult_4_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483652,2147483652]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_4_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483652,2147483652]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_4_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483652,2147483652]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_4_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483652,2147483652]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_4_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_4_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_4_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_4_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_4_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_4_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 4, i64 4>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ugt_4_v2i64(<2 x i64> %0) {
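; All-ones in each i64 lane where ctpop(x) u> 4, zero elsewhere.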
|
|
; SSE2-LABEL: ugt_4_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483652,2147483652]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_4_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483652,2147483652]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_4_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483652,2147483652]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_4_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483652,2147483652]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_4_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_4_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_4_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_4_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_4_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_4_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ugt <2 x i64> %2, <i64 4, i64 4>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ult_5_v2i64(<2 x i64> %0) {
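; All-ones in each i64 lane where ctpop(x) u< 5, zero elsewhere.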
|
|
; SSE2-LABEL: ult_5_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483653,2147483653]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_5_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483653,2147483653]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_5_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483653,2147483653]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_5_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483653,2147483653]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_5_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_5_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_5_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_5_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_5_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_5_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 5, i64 5>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ugt_5_v2i64(<2 x i64> %0) {
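; All-ones in each i64 lane where ctpop(x) u> 5, zero elsewhere.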
|
|
; SSE2-LABEL: ugt_5_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483653,2147483653]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_5_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483653,2147483653]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_5_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483653,2147483653]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_5_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483653,2147483653]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_5_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_5_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_5_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_5_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_5_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_5_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ugt <2 x i64> %2, <i64 5, i64 5>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ult_6_v2i64(<2 x i64> %0) {
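; All-ones in each i64 lane where ctpop(x) u< 6, zero elsewhere.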
|
|
; SSE2-LABEL: ult_6_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483654,2147483654]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_6_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483654,2147483654]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_6_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483654,2147483654]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_6_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483654,2147483654]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_6_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_6_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_6_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_6_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_6_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_6_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 6, i64 6>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ugt_6_v2i64(<2 x i64> %0) {
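; All-ones in each i64 lane where ctpop(x) u> 6, zero elsewhere.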
|
|
; SSE2-LABEL: ugt_6_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483654,2147483654]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_6_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483654,2147483654]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_6_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483654,2147483654]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_6_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483654,2147483654]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_6_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_6_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_6_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_6_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_6_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_6_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 6, i64 6>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_7_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_7_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483655,2147483655]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_7_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483655,2147483655]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_7_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483655,2147483655]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_7_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483655,2147483655]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_7_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_7_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_7_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_7_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_7_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_7_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 7, i64 7>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_7_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_7_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483655,2147483655]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_7_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483655,2147483655]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_7_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483655,2147483655]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_7_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483655,2147483655]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_7_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_7_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_7_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_7_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_7_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_7_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 7, i64 7>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_8_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_8_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483656,2147483656]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_8_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483656,2147483656]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_8_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483656,2147483656]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_8_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483656,2147483656]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_8_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_8_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_8_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_8_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_8_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_8_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 8, i64 8>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_8_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_8_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483656,2147483656]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_8_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483656,2147483656]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_8_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483656,2147483656]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_8_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483656,2147483656]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_8_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_8_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_8_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_8_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_8_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_8_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 8, i64 8>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_9_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_9_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483657,2147483657]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_9_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483657,2147483657]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_9_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483657,2147483657]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_9_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483657,2147483657]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_9_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_9_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_9_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_9_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_9_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_9_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 9, i64 9>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_9_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_9_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483657,2147483657]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_9_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483657,2147483657]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_9_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483657,2147483657]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_9_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483657,2147483657]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_9_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_9_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_9_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_9_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_9_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_9_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 9, i64 9>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_10_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_10_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483658,2147483658]
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_10_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483658,2147483658]
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_10_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483658,2147483658]
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_10_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483658,2147483658]
; SSE41-NEXT: movdqa %xmm1, %xmm2
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_10_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10]
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_10_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10]
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_10_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_10_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_10_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_10_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 10, i64 10>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ugt_10_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ugt_10_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483658,2147483658]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_10_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483658,2147483658]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_10_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483658,2147483658]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_10_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483658,2147483658]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_10_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_10_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_10_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_10_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_10_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_10_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 10, i64 10>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ult_11_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_11_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483659,2147483659]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_11_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483659,2147483659]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_11_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483659,2147483659]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_11_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483659,2147483659]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_11_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_11_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_11_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_11_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_11_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_11_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 11, i64 11>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ugt_11_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_11_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483659,2147483659]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_11_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483659,2147483659]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_11_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483659,2147483659]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_11_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483659,2147483659]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_11_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_11_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_11_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_11_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_11_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_11_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 11, i64 11>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ult_12_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_12_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483660,2147483660]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_12_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483660,2147483660]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_12_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483660,2147483660]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_12_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483660,2147483660]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_12_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_12_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_12_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_12_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_12_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_12_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 12, i64 12>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ugt_12_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_12_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483660,2147483660]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_12_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483660,2147483660]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_12_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483660,2147483660]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_12_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483660,2147483660]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_12_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_12_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_12_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_12_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_12_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_12_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 12, i64 12>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ult_13_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_13_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483661,2147483661]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_13_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483661,2147483661]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_13_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483661,2147483661]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_13_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483661,2147483661]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_13_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_13_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_13_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_13_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_13_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_13_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 13, i64 13>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ugt_13_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_13_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483661,2147483661]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_13_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483661,2147483661]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_13_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483661,2147483661]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_13_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483661,2147483661]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_13_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_13_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_13_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_13_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_13_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_13_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 13, i64 13>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ult_14_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ult_14_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483662,2147483662]
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_14_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483662,2147483662]
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_14_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483662,2147483662]
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_14_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483662,2147483662]
; SSE41-NEXT: movdqa %xmm1, %xmm2
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_14_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14]
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_14_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14]
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_14_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_14_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_14_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_14_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 14, i64 14>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ugt_14_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_14_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483662,2147483662]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_14_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483662,2147483662]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_14_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483662,2147483662]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_14_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483662,2147483662]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_14_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_14_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_14_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_14_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_14_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_14_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 14, i64 14>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}

define <2 x i64> @ult_15_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_15_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483663,2147483663]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_15_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483663,2147483663]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_15_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483663,2147483663]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_15_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483663,2147483663]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_15_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_15_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_15_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_15_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_15_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_15_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 15, i64 15>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}

define <2 x i64> @ugt_15_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_15_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483663,2147483663]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_15_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483663,2147483663]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_15_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483663,2147483663]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_15_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483663,2147483663]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_15_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_15_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_15_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_15_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_15_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_15_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 15, i64 15>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}

define <2 x i64> @ult_16_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_16_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483664,2147483664]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_16_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483664,2147483664]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_16_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483664,2147483664]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_16_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483664,2147483664]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_16_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [16,16]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_16_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [16,16]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_16_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [16,16]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_16_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_16_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [16,16]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_16_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 16, i64 16>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}

define <2 x i64> @ugt_16_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_16_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483664,2147483664]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_16_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483664,2147483664]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_16_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483664,2147483664]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_16_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483664,2147483664]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_16_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_16_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_16_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_16_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_16_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_16_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 16, i64 16>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}

define <2 x i64> @ult_17_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_17_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483665,2147483665]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_17_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483665,2147483665]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_17_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483665,2147483665]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_17_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483665,2147483665]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_17_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [17,17]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_17_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [17,17]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_17_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [17,17]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_17_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_17_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [17,17]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_17_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 17, i64 17>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}

define <2 x i64> @ugt_17_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_17_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483665,2147483665]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_17_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483665,2147483665]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_17_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483665,2147483665]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_17_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483665,2147483665]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_17_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_17_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_17_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_17_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_17_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_17_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 17, i64 17>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

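; A note on the SSE lowerings in the checks above: with no native vector
; popcount, the count is built with the usual SWAR steps (psrlw/pand/psubb/
; paddb against what are presumably the 0x55/0x33/0x0f masks hidden behind the
; {{.*}}(%rip) operands) or, on SSSE3/SSE4.1, with a pshufb nibble lookup
; through the [0,1,1,2,1,2,2,3,...,4] table, and a final psadbw against zero
; sums the per-byte counts into each 64-bit element.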
define <2 x i64> @ult_18_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ult_18_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483666,2147483666]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_18_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483666,2147483666]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_18_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483666,2147483666]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_18_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483666,2147483666]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_18_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [18,18]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_18_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [18,18]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_18_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [18,18]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_18_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_18_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [18,18]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_18_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 18, i64 18>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

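; A note on the compares: pre-AVX targets have no unsigned 64-bit compare, so
; the por above appears to fold in a hidden bias constant and the code then
; tests against K + 0x80000000 (e.g. 2147483666 = 18 + 2^31), stitching the
; 32-bit pcmpgtd/pcmpeqd results into a 64-bit predicate; the ult variants
; simply swap the pcmpgtd operands. The AVX paths can use the signed vpcmpgtq
; directly because a population count of a 64-bit element never exceeds 64.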
define <2 x i64> @ugt_18_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ugt_18_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483666,2147483666]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_18_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483666,2147483666]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_18_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483666,2147483666]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_18_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483666,2147483666]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_18_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_18_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_18_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_18_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_18_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_18_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 18, i64 18>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

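; A note on the AVX-512 paths: without VL the VPOPCNTDQ code widens to zmm for
; vpopcntq (hence the kill comment and vzeroupper); BITALG only provides a byte
; popcount, so vpopcntb is followed by a vpsadbw reduction; and with VL the
; predicate comes from vpcmpnleuq (ugt) or vpcmpltuq (ult) into %k1, which the
; zero-masked vmovdqa64 of an all-ones register then expands back into -1/0
; lanes, matching the sext in the IR.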
define <2 x i64> @ult_19_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ult_19_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483667,2147483667]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_19_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483667,2147483667]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_19_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483667,2147483667]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_19_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483667,2147483667]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_19_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [19,19]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_19_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [19,19]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_19_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [19,19]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_19_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_19_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [19,19]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_19_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 19, i64 19>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ugt_19_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ugt_19_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483667,2147483667]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_19_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483667,2147483667]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_19_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483667,2147483667]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_19_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483667,2147483667]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_19_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_19_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_19_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_19_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_19_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_19_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 19, i64 19>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ult_20_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ult_20_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483668,2147483668]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_20_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483668,2147483668]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_20_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483668,2147483668]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_20_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483668,2147483668]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_20_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [20,20]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_20_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [20,20]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_20_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [20,20]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_20_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_20_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [20,20]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_20_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 20, i64 20>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ugt_20_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ugt_20_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483668,2147483668]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_20_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483668,2147483668]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_20_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483668,2147483668]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_20_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483668,2147483668]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_20_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_20_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_20_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_20_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_20_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_20_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 20, i64 20>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ult_21_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ult_21_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483669,2147483669]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_21_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483669,2147483669]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_21_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483669,2147483669]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_21_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483669,2147483669]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_21_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [21,21]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_21_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [21,21]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_21_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [21,21]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_21_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_21_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [21,21]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_21_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 21, i64 21>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ugt_21_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ugt_21_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483669,2147483669]
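; Note: 2147483669 = 0x80000015 = 0x80000000 + 21. The preceding por (operand hidden by the {{.*}}(%rip) pattern) presumably sets the sign bit of each low dword,
; so the unsigned 64-bit compare can be emulated with signed 32-bit pcmpgtd/pcmpeqd: high-dword greater OR (high-dword equal AND low-dword greater).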
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_21_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483669,2147483669]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_21_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483669,2147483669]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_21_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483669,2147483669]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_21_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_21_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_21_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_21_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_21_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_21_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 21, i64 21>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_22_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_22_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483670,2147483670]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_22_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483670,2147483670]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_22_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483670,2147483670]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_22_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483670,2147483670]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_22_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [22,22]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_22_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [22,22]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_22_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [22,22]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_22_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_22_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [22,22]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_22_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 22, i64 22>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_22_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_22_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483670,2147483670]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_22_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483670,2147483670]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_22_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483670,2147483670]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_22_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483670,2147483670]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_22_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_22_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_22_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_22_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_22_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_22_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 22, i64 22>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_23_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_23_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483671,2147483671]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_23_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483671,2147483671]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_23_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483671,2147483671]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_23_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483671,2147483671]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_23_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [23,23]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_23_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [23,23]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_23_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [23,23]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_23_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_23_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [23,23]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_23_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 23, i64 23>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_23_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_23_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483671,2147483671]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_23_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483671,2147483671]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_23_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483671,2147483671]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_23_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483671,2147483671]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_23_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_23_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_23_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_23_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_23_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_23_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 23, i64 23>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_24_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_24_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483672,2147483672]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_24_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483672,2147483672]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_24_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483672,2147483672]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_24_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483672,2147483672]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_24_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [24,24]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_24_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [24,24]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_24_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [24,24]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_24_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_24_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [24,24]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_24_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 24, i64 24>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_24_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_24_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483672,2147483672]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_24_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483672,2147483672]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_24_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483672,2147483672]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_24_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483672,2147483672]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_24_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_24_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_24_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_24_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_24_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_24_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 24, i64 24>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_25_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_25_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483673,2147483673]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_25_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483673,2147483673]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_25_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483673,2147483673]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_25_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483673,2147483673]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_25_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [25,25]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_25_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [25,25]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_25_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [25,25]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_25_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_25_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [25,25]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_25_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 25, i64 25>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_25_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_25_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483673,2147483673]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_25_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483673,2147483673]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_25_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483673,2147483673]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_25_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483673,2147483673]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_25_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_25_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_25_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_25_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_25_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_25_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 25, i64 25>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_26_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_26_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483674,2147483674]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_26_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483674,2147483674]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_26_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483674,2147483674]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_26_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483674,2147483674]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_26_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [26,26]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_26_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [26,26]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_26_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [26,26]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_26_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_26_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [26,26]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_26_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 26, i64 26>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_26_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_26_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483674,2147483674]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_26_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483674,2147483674]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_26_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483674,2147483674]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_26_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483674,2147483674]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_26_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_26_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_26_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_26_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_26_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_26_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 26, i64 26>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_27_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_27_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483675,2147483675]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_27_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483675,2147483675]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_27_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483675,2147483675]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_27_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483675,2147483675]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_27_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [27,27]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_27_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [27,27]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_27_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [27,27]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_27_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_27_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [27,27]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_27_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 27, i64 27>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_27_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_27_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483675,2147483675]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_27_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483675,2147483675]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_27_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483675,2147483675]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_27_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483675,2147483675]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_27_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_27_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_27_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_27_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_27_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_27_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 27, i64 27>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_28_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_28_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483676,2147483676]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_28_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483676,2147483676]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_28_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483676,2147483676]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_28_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483676,2147483676]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_28_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [28,28]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_28_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [28,28]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_28_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [28,28]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_28_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_28_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [28,28]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_28_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 28, i64 28>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_28_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_28_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483676,2147483676]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_28_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483676,2147483676]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_28_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483676,2147483676]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_28_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483676,2147483676]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_28_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_28_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_28_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_28_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_28_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_28_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 28, i64 28>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_29_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_29_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483677,2147483677]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_29_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483677,2147483677]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_29_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483677,2147483677]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_29_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483677,2147483677]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_29_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [29,29]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_29_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [29,29]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_29_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [29,29]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_29_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_29_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [29,29]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_29_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 29, i64 29>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_29_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_29_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483677,2147483677]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_29_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483677,2147483677]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_29_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483677,2147483677]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_29_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483677,2147483677]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_29_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_29_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_29_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_29_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_29_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_29_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 29, i64 29>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_30_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_30_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483678,2147483678]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_30_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483678,2147483678]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_30_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483678,2147483678]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_30_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483678,2147483678]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_30_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [30,30]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_30_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [30,30]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_30_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [30,30]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_30_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_30_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [30,30]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_30_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 30, i64 30>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_30_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_30_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483678,2147483678]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_30_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483678,2147483678]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_30_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483678,2147483678]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_30_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483678,2147483678]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_30_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_30_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_30_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_30_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_30_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_30_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 30, i64 30>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_31_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_31_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483679,2147483679]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_31_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483679,2147483679]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_31_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483679,2147483679]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_31_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483679,2147483679]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_31_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [31,31]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_31_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [31,31]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_31_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [31,31]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_31_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_31_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [31,31]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_31_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 31, i64 31>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_31_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_31_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483679,2147483679]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_31_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483679,2147483679]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_31_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483679,2147483679]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_31_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483679,2147483679]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_31_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_31_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_31_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_31_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_31_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_31_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 31, i64 31>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_32_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_32_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483680,2147483680]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_32_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483680,2147483680]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_32_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483680,2147483680]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_32_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483680,2147483680]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_32_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [32,32]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_32_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [32,32]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_32_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [32,32]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_32_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_32_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [32,32]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_32_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 32, i64 32>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_32_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_32_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483680,2147483680]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_32_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483680,2147483680]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_32_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483680,2147483680]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_32_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483680,2147483680]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_32_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_32_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_32_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_32_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_32_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_32_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ugt <2 x i64> %2, <i64 32, i64 32>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}
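; In these v2i64 tests the SSE2/SSE3 lowerings compute the byte popcount with the shift-and-mask
; bit trick, while SSSE3/SSE41 use the pshufb nibble lookup table; in both cases psadbw against
; zero then sums the bytes into each 64-bit lane. Since no unsigned 64-bit vector compare is
; available here, the count is OR'ed with a bias constant (elided by the {{.*}}(%rip) pattern,
; presumably the sign bit of the low dword) and compared against the threshold plus 2^31, e.g.
; 2147483680 = 32 + 2147483648 and 2147483681 = 33 + 2147483648, via the signed
; pcmpgtd/pcmpeqd/pshufd sequence.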
define <2 x i64> @ult_33_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_33_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483681,2147483681]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_33_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483681,2147483681]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_33_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483681,2147483681]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_33_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483681,2147483681]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_33_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [33,33]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_33_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [33,33]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_33_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [33,33]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_33_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_33_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [33,33]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_33_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 33, i64 33>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}
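; With AVX512VL available (the AVX512VPOPCNTDQVL and BITALG prefixes) the unsigned compare goes
; through a mask register: vpcmpltuq/vpcmpnleuq write %k1, and the sign-extended <2 x i1> result
; is materialized by zero-masking an all-ones vector (vpcmpeqd %xmm0, %xmm0, %xmm0 followed by
; vmovdqa64 %xmm0, %xmm0 {%k1} {z}).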
define <2 x i64> @ugt_33_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_33_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483681,2147483681]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_33_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483681,2147483681]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_33_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483681,2147483681]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_33_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483681,2147483681]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_33_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_33_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_33_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_33_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_33_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_33_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ugt <2 x i64> %2, <i64 33, i64 33>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}
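; Without AVX512VL (the AVX512VPOPCNTDQ and BITALG_NOVLX prefixes) the xmm argument is widened
; to zmm so that vpopcntq/vpopcntb can be used, the compare is still done on xmm with vpcmpgtq
; against a splat of the threshold, and vzeroupper is emitted before returning.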
define <2 x i64> @ult_34_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_34_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483682,2147483682]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_34_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483682,2147483682]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_34_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483682,2147483682]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_34_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483682,2147483682]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_34_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [34,34]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_34_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [34,34]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_34_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [34,34]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_34_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_34_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [34,34]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_34_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 34, i64 34>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ugt_34_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_34_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483682,2147483682]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_34_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483682,2147483682]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_34_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483682,2147483682]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_34_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483682,2147483682]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_34_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_34_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_34_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_34_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_34_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_34_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ugt <2 x i64> %2, <i64 34, i64 34>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ult_35_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_35_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483683,2147483683]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_35_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483683,2147483683]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_35_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483683,2147483683]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_35_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483683,2147483683]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_35_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [35,35]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_35_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [35,35]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_35_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [35,35]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_35_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_35_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [35,35]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_35_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 35, i64 35>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ugt_35_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_35_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483683,2147483683]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_35_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483683,2147483683]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_35_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483683,2147483683]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_35_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483683,2147483683]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_35_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_35_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_35_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_35_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_35_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_35_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ugt <2 x i64> %2, <i64 35, i64 35>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ult_36_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_36_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483684,2147483684]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_36_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483684,2147483684]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_36_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483684,2147483684]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_36_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483684,2147483684]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_36_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [36,36]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_36_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [36,36]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_36_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [36,36]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_36_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_36_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [36,36]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_36_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 36, i64 36>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ugt_36_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_36_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483684,2147483684]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_36_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483684,2147483684]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_36_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483684,2147483684]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_36_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483684,2147483684]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_36_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_36_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_36_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_36_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_36_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_36_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 36, i64 36>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ult_37_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_37_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483685,2147483685]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_37_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483685,2147483685]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_37_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483685,2147483685]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_37_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483685,2147483685]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_37_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [37,37]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_37_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [37,37]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_37_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [37,37]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_37_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_37_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [37,37]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_37_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 37, i64 37>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ugt_37_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_37_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483685,2147483685]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_37_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483685,2147483685]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_37_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483685,2147483685]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_37_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483685,2147483685]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_37_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_37_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_37_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_37_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_37_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_37_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 37, i64 37>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ult_38_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_38_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483686,2147483686]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_38_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483686,2147483686]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_38_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483686,2147483686]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_38_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483686,2147483686]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_38_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [38,38]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_38_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [38,38]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_38_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [38,38]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_38_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_38_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [38,38]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_38_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 38, i64 38>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ugt_38_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_38_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483686,2147483686]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_38_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483686,2147483686]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_38_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483686,2147483686]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_38_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483686,2147483686]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_38_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_38_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_38_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_38_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_38_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_38_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 38, i64 38>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ult_39_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_39_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483687,2147483687]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_39_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483687,2147483687]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_39_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483687,2147483687]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_39_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483687,2147483687]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_39_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [39,39]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_39_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [39,39]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_39_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [39,39]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_39_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_39_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [39,39]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_39_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 39, i64 39>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ugt_39_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_39_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483687,2147483687]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_39_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483687,2147483687]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_39_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483687,2147483687]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_39_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483687,2147483687]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_39_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_39_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_39_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_39_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_39_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_39_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 39, i64 39>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ult_40_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_40_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483688,2147483688]
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_40_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483688,2147483688]
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_40_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483688,2147483688]
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_40_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483688,2147483688]
; SSE41-NEXT: movdqa %xmm1, %xmm2
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_40_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [40,40]
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_40_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [40,40]
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_40_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [40,40]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_40_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_40_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [40,40]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_40_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 40, i64 40>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}
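; Reader's note (not autogenerated): the v2i64 cases below all follow one shape. The
; byte-wise population count is computed first (shift/mask bit-twiddling on SSE2/SSE3,
; a pshufb nibble lookup table on SSSE3 and later), the bytes of each 64-bit lane are
; summed with psadbw, and the result is compared against the threshold. The SSE paths
; have no 64-bit vector compare, so they appear to bias the operands (hence constants
; such as 2147483688 = 0x80000000 + 40) and emulate the compare with
; pcmpgtd/pcmpeqd/pshufd; AVX1/AVX2 use a signed vpcmpgtq, which is safe because a
; popcount never exceeds 64; the AVX512VL and BITALG paths use the vpcmpltuq/vpcmpnleuq
; mask compares directly.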
define <2 x i64> @ugt_40_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ugt_40_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483688,2147483688]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_40_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483688,2147483688]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_40_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483688,2147483688]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_40_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483688,2147483688]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_40_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_40_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_40_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_40_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_40_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_40_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ugt <2 x i64> %2, <i64 40, i64 40>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}
define <2 x i64> @ult_41_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_41_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483689,2147483689]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_41_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483689,2147483689]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_41_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483689,2147483689]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_41_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483689,2147483689]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_41_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [41,41]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_41_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [41,41]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_41_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [41,41]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_41_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_41_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [41,41]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_41_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 41, i64 41>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ugt_41_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_41_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483689,2147483689]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_41_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483689,2147483689]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_41_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483689,2147483689]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_41_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483689,2147483689]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_41_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_41_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_41_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_41_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_41_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_41_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 41, i64 41>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ult_42_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_42_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483690,2147483690]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_42_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483690,2147483690]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_42_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483690,2147483690]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_42_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483690,2147483690]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_42_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [42,42]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_42_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [42,42]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_42_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [42,42]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_42_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_42_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [42,42]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_42_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 42, i64 42>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ugt_42_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_42_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483690,2147483690]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_42_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483690,2147483690]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_42_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483690,2147483690]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_42_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483690,2147483690]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_42_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_42_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_42_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_42_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_42_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_42_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 42, i64 42>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ult_43_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_43_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483691,2147483691]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_43_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483691,2147483691]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_43_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483691,2147483691]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_43_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483691,2147483691]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_43_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [43,43]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_43_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [43,43]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_43_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [43,43]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_43_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_43_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [43,43]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_43_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 43, i64 43>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ugt_43_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_43_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483691,2147483691]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_43_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483691,2147483691]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_43_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483691,2147483691]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_43_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483691,2147483691]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_43_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_43_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_43_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_43_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_43_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_43_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 43, i64 43>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
define <2 x i64> @ult_44_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_44_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483692,2147483692]
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_44_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483692,2147483692]
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_44_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483692,2147483692]
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_44_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483692,2147483692]
; SSE41-NEXT: movdqa %xmm1, %xmm2
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_44_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [44,44]
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_44_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [44,44]
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_44_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [44,44]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_44_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_44_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [44,44]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_44_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 44, i64 44>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ugt_44_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_44_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483692,2147483692]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_44_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483692,2147483692]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_44_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483692,2147483692]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_44_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483692,2147483692]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_44_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_44_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_44_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_44_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_44_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_44_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 44, i64 44>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_45_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_45_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483693,2147483693]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_45_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483693,2147483693]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_45_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483693,2147483693]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_45_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483693,2147483693]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_45_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [45,45]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_45_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [45,45]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_45_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [45,45]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_45_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_45_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [45,45]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_45_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 45, i64 45>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_45_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_45_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483693,2147483693]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_45_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483693,2147483693]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_45_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483693,2147483693]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_45_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483693,2147483693]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_45_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_45_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_45_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_45_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_45_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_45_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 45, i64 45>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_46_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_46_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483694,2147483694]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_46_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483694,2147483694]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_46_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483694,2147483694]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_46_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483694,2147483694]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_46_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [46,46]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_46_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [46,46]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_46_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [46,46]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_46_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_46_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [46,46]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_46_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 46, i64 46>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_46_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_46_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483694,2147483694]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_46_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483694,2147483694]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_46_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483694,2147483694]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_46_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483694,2147483694]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_46_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_46_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_46_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_46_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_46_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_46_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 46, i64 46>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_47_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_47_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483695,2147483695]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_47_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483695,2147483695]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_47_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483695,2147483695]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_47_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483695,2147483695]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_47_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [47,47]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_47_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [47,47]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_47_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [47,47]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_47_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_47_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [47,47]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_47_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 47, i64 47>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_47_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_47_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483695,2147483695]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_47_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483695,2147483695]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_47_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483695,2147483695]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_47_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483695,2147483695]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_47_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_47_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_47_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_47_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_47_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_47_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 47, i64 47>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ult_48_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_48_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483696,2147483696]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_48_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483696,2147483696]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_48_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483696,2147483696]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_48_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483696,2147483696]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_48_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [48,48]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_48_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [48,48]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_48_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [48,48]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_48_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_48_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [48,48]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_48_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 48, i64 48>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_48_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_48_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483696,2147483696]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_48_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483696,2147483696]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_48_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483696,2147483696]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_48_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483696,2147483696]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_48_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_48_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_48_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_48_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_48_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_48_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 48, i64 48>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_49_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_49_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483697,2147483697]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_49_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483697,2147483697]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_49_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483697,2147483697]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_49_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483697,2147483697]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_49_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [49,49]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_49_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [49,49]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_49_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [49,49]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_49_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_49_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [49,49]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_49_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 49, i64 49>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_49_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_49_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483697,2147483697]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_49_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483697,2147483697]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_49_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483697,2147483697]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_49_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483697,2147483697]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_49_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_49_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_49_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_49_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_49_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_49_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 49, i64 49>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_50_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_50_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483698,2147483698]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_50_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483698,2147483698]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_50_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483698,2147483698]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_50_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483698,2147483698]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_50_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [50,50]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_50_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [50,50]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_50_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [50,50]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_50_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_50_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [50,50]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_50_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 50, i64 50>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_50_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_50_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483698,2147483698]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_50_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483698,2147483698]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_50_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483698,2147483698]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_50_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483698,2147483698]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_50_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_50_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_50_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_50_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_50_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_50_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 50, i64 50>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_51_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_51_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483699,2147483699]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_51_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483699,2147483699]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_51_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483699,2147483699]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_51_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483699,2147483699]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_51_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [51,51]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_51_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [51,51]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_51_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [51,51]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_51_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_51_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [51,51]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_51_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 51, i64 51>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_51_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ugt_51_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483699,2147483699]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_51_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483699,2147483699]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_51_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483699,2147483699]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_51_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483699,2147483699]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_51_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_51_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_51_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_51_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_51_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_51_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 51, i64 51>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ult_52_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_52_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483700,2147483700]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_52_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483700,2147483700]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_52_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483700,2147483700]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_52_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483700,2147483700]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_52_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [52,52]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_52_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [52,52]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_52_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [52,52]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_52_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_52_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [52,52]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_52_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 52, i64 52>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ugt_52_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_52_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483700,2147483700]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_52_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483700,2147483700]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_52_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483700,2147483700]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_52_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483700,2147483700]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_52_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_52_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_52_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_52_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_52_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_52_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 52, i64 52>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ult_53_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_53_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483701,2147483701]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_53_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483701,2147483701]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_53_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483701,2147483701]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_53_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483701,2147483701]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_53_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [53,53]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_53_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [53,53]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_53_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [53,53]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_53_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_53_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [53,53]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_53_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 53, i64 53>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ugt_53_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_53_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483701,2147483701]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_53_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483701,2147483701]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_53_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483701,2147483701]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_53_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483701,2147483701]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_53_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_53_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_53_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_53_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_53_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_53_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 53, i64 53>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ult_54_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_54_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483702,2147483702]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_54_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483702,2147483702]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_54_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483702,2147483702]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_54_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483702,2147483702]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_54_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [54,54]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_54_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [54,54]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_54_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [54,54]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_54_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_54_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [54,54]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_54_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 54, i64 54>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ugt_54_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_54_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483702,2147483702]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_54_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483702,2147483702]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_54_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483702,2147483702]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_54_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483702,2147483702]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_54_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_54_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_54_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_54_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_54_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_54_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 54, i64 54>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ult_55_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_55_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483703,2147483703]
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_55_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483703,2147483703]
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_55_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483703,2147483703]
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_55_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483703,2147483703]
; SSE41-NEXT: movdqa %xmm1, %xmm2
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_55_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [55,55]
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_55_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [55,55]
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_55_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [55,55]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_55_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_55_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [55,55]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_55_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 55, i64 55>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ugt_55_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ugt_55_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483703,2147483703]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ugt_55_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483703,2147483703]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ugt_55_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483703,2147483703]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ugt_55_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483703,2147483703]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_55_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_55_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_55_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_55_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_55_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_55_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 55, i64 55>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ult_56_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ult_56_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483704,2147483704]
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_56_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483704,2147483704]
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_56_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483704,2147483704]
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_56_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483704,2147483704]
; SSE41-NEXT: movdqa %xmm1, %xmm2
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_56_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [56,56]
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_56_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [56,56]
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_56_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [56,56]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_56_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_56_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [56,56]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_56_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 56, i64 56>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}
define <2 x i64> @ugt_56_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_56_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483704,2147483704]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_56_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483704,2147483704]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_56_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483704,2147483704]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_56_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483704,2147483704]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_56_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_56_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_56_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_56_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_56_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_56_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 56, i64 56>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_57_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_57_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483705,2147483705]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_57_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483705,2147483705]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_57_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483705,2147483705]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_57_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483705,2147483705]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_57_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [57,57]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_57_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [57,57]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_57_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [57,57]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_57_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_57_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [57,57]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_57_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 57, i64 57>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_57_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_57_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483705,2147483705]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_57_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483705,2147483705]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_57_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483705,2147483705]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_57_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483705,2147483705]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_57_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_57_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_57_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_57_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_57_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_57_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 57, i64 57>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_58_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_58_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483706,2147483706]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_58_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483706,2147483706]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_58_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483706,2147483706]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_58_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483706,2147483706]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_58_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [58,58]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_58_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [58,58]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_58_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [58,58]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_58_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_58_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [58,58]
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ult_58_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ult <2 x i64> %2, <i64 58, i64 58>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ugt_58_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_58_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483706,2147483706]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_58_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483706,2147483706]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_58_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483706,2147483706]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_58_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483706,2147483706]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_58_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_58_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_58_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_58_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_58_v2i64:
|
|
; BITALG_NOVLX: # %bb.0:
|
|
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
|
|
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; BITALG_NOVLX-NEXT: vzeroupper
|
|
; BITALG_NOVLX-NEXT: retq
|
|
;
|
|
; BITALG-LABEL: ugt_58_v2i64:
|
|
; BITALG: # %bb.0:
|
|
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
|
|
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; BITALG-NEXT: retq
|
|
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
|
|
%3 = icmp ugt <2 x i64> %2, <i64 58, i64 58>
|
|
%4 = sext <2 x i1> %3 to <2 x i64>
|
|
ret <2 x i64> %4
|
|
}
|
|
|
|
define <2 x i64> @ult_59_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_59_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483707,2147483707]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_59_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483707,2147483707]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_59_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483707,2147483707]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_59_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483707,2147483707]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_59_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [59,59]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_59_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [59,59]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_59_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [59,59]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_59_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_59_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [59,59]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_59_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 59, i64 59>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ugt_59_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_59_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483707,2147483707]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_59_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483707,2147483707]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_59_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483707,2147483707]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_59_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483707,2147483707]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_59_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_59_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_59_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_59_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_59_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_59_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 59, i64 59>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ult_60_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_60_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483708,2147483708]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_60_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483708,2147483708]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_60_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483708,2147483708]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_60_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483708,2147483708]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_60_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [60,60]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_60_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [60,60]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_60_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [60,60]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_60_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_60_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [60,60]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_60_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 60, i64 60>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ugt_60_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_60_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483708,2147483708]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_60_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483708,2147483708]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_60_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483708,2147483708]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_60_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483708,2147483708]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_60_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_60_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_60_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_60_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_60_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_60_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 60, i64 60>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ult_61_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_61_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483709,2147483709]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_61_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483709,2147483709]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_61_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483709,2147483709]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_61_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483709,2147483709]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_61_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [61,61]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_61_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [61,61]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_61_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [61,61]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_61_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_61_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [61,61]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_61_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 61, i64 61>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ugt_61_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_61_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483709,2147483709]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_61_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483709,2147483709]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_61_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483709,2147483709]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_61_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483709,2147483709]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ugt_61_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ugt_61_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ugt_61_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ugt_61_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ugt_61_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_61_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ugt <2 x i64> %2, <i64 61, i64 61>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ult_62_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ult_62_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483710,2147483710]
|
|
; SSE2-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ult_62_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483710,2147483710]
|
|
; SSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ult_62_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483710,2147483710]
|
|
; SSSE3-NEXT: movdqa %xmm1, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ult_62_v2i64:
|
|
; SSE41: # %bb.0:
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSE41-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE41-NEXT: pand %xmm1, %xmm2
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSE41-NEXT: movdqa %xmm3, %xmm4
|
|
; SSE41-NEXT: pshufb %xmm2, %xmm4
|
|
; SSE41-NEXT: psrlw $4, %xmm0
|
|
; SSE41-NEXT: pand %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufb %xmm0, %xmm3
|
|
; SSE41-NEXT: paddb %xmm4, %xmm3
|
|
; SSE41-NEXT: pxor %xmm0, %xmm0
|
|
; SSE41-NEXT: psadbw %xmm3, %xmm0
|
|
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483710,2147483710]
|
|
; SSE41-NEXT: movdqa %xmm1, %xmm2
|
|
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE41-NEXT: pand %xmm3, %xmm1
|
|
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE41-NEXT: por %xmm1, %xmm0
|
|
; SSE41-NEXT: retq
|
|
;
|
|
; AVX1-LABEL: ult_62_v2i64:
|
|
; AVX1: # %bb.0:
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [62,62]
|
|
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX1-NEXT: retq
|
|
;
|
|
; AVX2-LABEL: ult_62_v2i64:
|
|
; AVX2: # %bb.0:
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
|
|
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
|
|
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
|
|
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
|
|
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
|
|
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [62,62]
|
|
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX2-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQ-LABEL: ult_62_v2i64:
|
|
; AVX512VPOPCNTDQ: # %bb.0:
|
|
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [62,62]
|
|
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
|
|
; AVX512VPOPCNTDQ-NEXT: vzeroupper
|
|
; AVX512VPOPCNTDQ-NEXT: retq
|
|
;
|
|
; AVX512VPOPCNTDQVL-LABEL: ult_62_v2i64:
|
|
; AVX512VPOPCNTDQVL: # %bb.0:
|
|
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
|
|
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
|
|
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
|
|
; AVX512VPOPCNTDQVL-NEXT: retq
|
|
;
|
|
; BITALG_NOVLX-LABEL: ult_62_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [62,62]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_62_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
%2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
%3 = icmp ult <2 x i64> %2, <i64 62, i64 62>
%4 = sext <2 x i1> %3 to <2 x i64>
ret <2 x i64> %4
}

define <2 x i64> @ugt_62_v2i64(<2 x i64> %0) {
|
|
; SSE2-LABEL: ugt_62_v2i64:
|
|
; SSE2: # %bb.0:
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $1, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: psubb %xmm1, %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pand %xmm1, %xmm2
|
|
; SSE2-NEXT: psrlw $2, %xmm0
|
|
; SSE2-NEXT: pand %xmm1, %xmm0
|
|
; SSE2-NEXT: paddb %xmm2, %xmm0
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE2-NEXT: psrlw $4, %xmm1
|
|
; SSE2-NEXT: paddb %xmm0, %xmm1
|
|
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE2-NEXT: pxor %xmm0, %xmm0
|
|
; SSE2-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483710,2147483710]
|
|
; SSE2-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE2-NEXT: pand %xmm3, %xmm1
|
|
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE2-NEXT: por %xmm1, %xmm0
|
|
; SSE2-NEXT: retq
|
|
;
|
|
; SSE3-LABEL: ugt_62_v2i64:
|
|
; SSE3: # %bb.0:
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $1, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: psubb %xmm1, %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSE3-NEXT: psrlw $2, %xmm0
|
|
; SSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSE3-NEXT: paddb %xmm2, %xmm0
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm1
|
|
; SSE3-NEXT: psrlw $4, %xmm1
|
|
; SSE3-NEXT: paddb %xmm0, %xmm1
|
|
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
|
|
; SSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSE3-NEXT: psadbw %xmm1, %xmm0
|
|
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483710,2147483710]
|
|
; SSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSE3-NEXT: por %xmm1, %xmm0
|
|
; SSE3-NEXT: retq
|
|
;
|
|
; SSSE3-LABEL: ugt_62_v2i64:
|
|
; SSSE3: # %bb.0:
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pand %xmm1, %xmm2
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
|
|
; SSSE3-NEXT: movdqa %xmm3, %xmm4
|
|
; SSSE3-NEXT: pshufb %xmm2, %xmm4
|
|
; SSSE3-NEXT: psrlw $4, %xmm0
|
|
; SSSE3-NEXT: pand %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufb %xmm0, %xmm3
|
|
; SSSE3-NEXT: paddb %xmm4, %xmm3
|
|
; SSSE3-NEXT: pxor %xmm0, %xmm0
|
|
; SSSE3-NEXT: psadbw %xmm3, %xmm0
|
|
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
|
|
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483710,2147483710]
|
|
; SSSE3-NEXT: movdqa %xmm0, %xmm2
|
|
; SSSE3-NEXT: pcmpgtd %xmm1, %xmm2
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
|
|
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
|
|
; SSSE3-NEXT: pand %xmm3, %xmm1
|
|
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
|
|
; SSSE3-NEXT: por %xmm1, %xmm0
|
|
; SSSE3-NEXT: retq
|
|
;
|
|
; SSE41-LABEL: ugt_62_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483710,2147483710]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pcmpgtd %xmm1, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ugt_62_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ugt_62_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ugt_62_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ugt_62_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ugt_62_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vpcmpgtq {{.*}}(%rip), %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ugt_62_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpnleuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ugt <2 x i64> %2, <i64 62, i64 62>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

define <2 x i64> @ult_63_v2i64(<2 x i64> %0) {
; SSE2-LABEL: ult_63_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $1, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: psubb %xmm1, %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE2-NEXT: movdqa %xmm0, %xmm2
; SSE2-NEXT: pand %xmm1, %xmm2
; SSE2-NEXT: psrlw $2, %xmm0
; SSE2-NEXT: pand %xmm1, %xmm0
; SSE2-NEXT: paddb %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm1
; SSE2-NEXT: psrlw $4, %xmm1
; SSE2-NEXT: paddb %xmm0, %xmm1
; SSE2-NEXT: pand {{.*}}(%rip), %xmm1
; SSE2-NEXT: pxor %xmm0, %xmm0
; SSE2-NEXT: psadbw %xmm1, %xmm0
; SSE2-NEXT: por {{.*}}(%rip), %xmm0
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483711,2147483711]
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm0, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm1, %xmm0
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT: pand %xmm3, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE3-LABEL: ult_63_v2i64:
; SSE3: # %bb.0:
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $1, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: psubb %xmm1, %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
; SSE3-NEXT: movdqa %xmm0, %xmm2
; SSE3-NEXT: pand %xmm1, %xmm2
; SSE3-NEXT: psrlw $2, %xmm0
; SSE3-NEXT: pand %xmm1, %xmm0
; SSE3-NEXT: paddb %xmm2, %xmm0
; SSE3-NEXT: movdqa %xmm0, %xmm1
; SSE3-NEXT: psrlw $4, %xmm1
; SSE3-NEXT: paddb %xmm0, %xmm1
; SSE3-NEXT: pand {{.*}}(%rip), %xmm1
; SSE3-NEXT: pxor %xmm0, %xmm0
; SSE3-NEXT: psadbw %xmm1, %xmm0
; SSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483711,2147483711]
; SSE3-NEXT: movdqa %xmm1, %xmm2
; SSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE3-NEXT: pand %xmm3, %xmm1
; SSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE3-NEXT: por %xmm1, %xmm0
; SSE3-NEXT: retq
;
; SSSE3-LABEL: ult_63_v2i64:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSSE3-NEXT: movdqa %xmm0, %xmm2
; SSSE3-NEXT: pand %xmm1, %xmm2
; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pshufb %xmm2, %xmm4
; SSSE3-NEXT: psrlw $4, %xmm0
; SSSE3-NEXT: pand %xmm1, %xmm0
; SSSE3-NEXT: pshufb %xmm0, %xmm3
; SSSE3-NEXT: paddb %xmm4, %xmm3
; SSSE3-NEXT: pxor %xmm0, %xmm0
; SSSE3-NEXT: psadbw %xmm3, %xmm0
; SSSE3-NEXT: por {{.*}}(%rip), %xmm0
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483711,2147483711]
; SSSE3-NEXT: movdqa %xmm1, %xmm2
; SSSE3-NEXT: pcmpgtd %xmm0, %xmm2
; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSSE3-NEXT: pand %xmm3, %xmm1
; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSSE3-NEXT: por %xmm1, %xmm0
; SSSE3-NEXT: retq
;
; SSE41-LABEL: ult_63_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pand %xmm1, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pshufb %xmm2, %xmm4
; SSE41-NEXT: psrlw $4, %xmm0
; SSE41-NEXT: pand %xmm1, %xmm0
; SSE41-NEXT: pshufb %xmm0, %xmm3
; SSE41-NEXT: paddb %xmm4, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm0
; SSE41-NEXT: psadbw %xmm3, %xmm0
; SSE41-NEXT: por {{.*}}(%rip), %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [2147483711,2147483711]
; SSE41-NEXT: movdqa %xmm1, %xmm2
; SSE41-NEXT: pcmpgtd %xmm0, %xmm2
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm3, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
;
; AVX1-LABEL: ult_63_v2i64:
; AVX1: # %bb.0:
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [63,63]
; AVX1-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: ult_63_v2i64:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2
; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0
; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [63,63]
; AVX2-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX2-NEXT: retq
;
; AVX512VPOPCNTDQ-LABEL: ult_63_v2i64:
; AVX512VPOPCNTDQ: # %bb.0:
; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} xmm1 = [63,63]
; AVX512VPOPCNTDQ-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
; AVX512VPOPCNTDQVL-LABEL: ult_63_v2i64:
; AVX512VPOPCNTDQVL: # %bb.0:
; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; AVX512VPOPCNTDQVL-NEXT: retq
;
; BITALG_NOVLX-LABEL: ult_63_v2i64:
; BITALG_NOVLX: # %bb.0:
; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [63,63]
; BITALG_NOVLX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
; BITALG_NOVLX-NEXT: vzeroupper
; BITALG_NOVLX-NEXT: retq
;
; BITALG-LABEL: ult_63_v2i64:
; BITALG: # %bb.0:
; BITALG-NEXT: vpopcntb %xmm0, %xmm0
; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
; BITALG-NEXT: vpcmpltuq {{.*}}(%rip), %xmm0, %k1
; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
; BITALG-NEXT: retq
  %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
  %3 = icmp ult <2 x i64> %2, <i64 63, i64 63>
  %4 = sext <2 x i1> %3 to <2 x i64>
  ret <2 x i64> %4
}

declare <16 x i8> @llvm.ctpop.v16i8(<16 x i8>)
declare <8 x i16> @llvm.ctpop.v8i16(<8 x i16>)
declare <4 x i32> @llvm.ctpop.v4i32(<4 x i32>)
declare <2 x i64> @llvm.ctpop.v2i64(<2 x i64>)