; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+avx512f | FileCheck --check-prefixes=CHECK,KNL %s
; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+avx512f,+avx512bw,+avx512vl,+avx512dq | FileCheck --check-prefixes=CHECK,SKX %s
; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+avx512f,+avx512bw,+avx512vl,+avx512dq,+avx512vbmi | FileCheck --check-prefixes=CHECK,SKX %s

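; This file exercises insertelement/extractelement lowering for AVX-512: the
; KNL prefix covers plain +avx512f, while SKX adds BW/VL/DQ (and VBMI in the
; third RUN line).

; test1/test2: insert one element loaded from memory and one passed in a
; register at constant indices. The element above the low 128 bits is placed
; with a two-source permute (test1) or a masked broadcast (test2).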
define <16 x float> @test1(<16 x float> %x, ptr %br, float %y) nounwind {
; CHECK-LABEL: test1:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vinsertps {{.*#+}} xmm2 = xmm0[0],mem[0],xmm0[2,3]
; CHECK-NEXT:    vinsertf32x4 $0, %xmm2, %zmm0, %zmm2
; CHECK-NEXT:    vbroadcastss %xmm1, %zmm1
; CHECK-NEXT:    vmovaps {{.*#+}} zmm0 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,30,15]
; CHECK-NEXT:    vpermi2ps %zmm1, %zmm2, %zmm0
; CHECK-NEXT:    retq
  %rrr = load float, ptr %br
  %rrr2 = insertelement <16 x float> %x, float %rrr, i32 1
  %rrr3 = insertelement <16 x float> %rrr2, float %y, i32 14
  ret <16 x float> %rrr3
}

define <8 x double> @test2(<8 x double> %x, ptr %br, double %y) nounwind {
; KNL-LABEL: test2:
; KNL:       ## %bb.0:
; KNL-NEXT:    vmovhpd {{.*#+}} xmm2 = xmm0[0],mem[0]
; KNL-NEXT:    vinsertf32x4 $0, %xmm2, %zmm0, %zmm0
; KNL-NEXT:    movb $64, %al
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    vbroadcastsd %xmm1, %zmm0 {%k1}
; KNL-NEXT:    retq
;
; SKX-LABEL: test2:
; SKX:       ## %bb.0:
; SKX-NEXT:    vmovhpd {{.*#+}} xmm2 = xmm0[0],mem[0]
; SKX-NEXT:    vinsertf32x4 $0, %xmm2, %zmm0, %zmm0
; SKX-NEXT:    movb $64, %al
; SKX-NEXT:    kmovd %eax, %k1
; SKX-NEXT:    vbroadcastsd %xmm1, %zmm0 {%k1}
; SKX-NEXT:    retq
  %rrr = load double, ptr %br
  %rrr2 = insertelement <8 x double> %x, double %rrr, i32 1
  %rrr3 = insertelement <8 x double> %rrr2, double %y, i32 6
  ret <8 x double> %rrr3
}

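; test3/test4: extract an element at a constant index and reinsert it at a
; different constant index of the same vector; the lane holding the element is
; extracted, shuffled into the low xmm, and reinserted.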
define <16 x float> @test3(<16 x float> %x) nounwind {
; CHECK-LABEL: test3:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vextractf128 $1, %ymm0, %xmm1
; CHECK-NEXT:    vinsertps {{.*#+}} xmm1 = xmm0[0],xmm1[0],xmm0[2,3]
; CHECK-NEXT:    vinsertf32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    retq
  %eee = extractelement <16 x float> %x, i32 4
  %rrr2 = insertelement <16 x float> %x, float %eee, i32 1
  ret <16 x float> %rrr2
}

define <8 x i64> @test4(<8 x i64> %x) nounwind {
; CHECK-LABEL: test4:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vextractf32x4 $2, %zmm0, %xmm1
; CHECK-NEXT:    vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm1[0]
; CHECK-NEXT:    vinsertf32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    retq
  %eee = extractelement <8 x i64> %x, i32 4
  %rrr2 = insertelement <8 x i64> %x, i64 %eee, i32 1
  ret <8 x i64> %rrr2
}

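; test5/test6: extract float lane 3 as an i32; vextractps can move it straight
; to a GPR (test5) or store it directly to memory (test6).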
define i32 @test5(<4 x float> %x) nounwind {
; CHECK-LABEL: test5:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vextractps $3, %xmm0, %eax
; CHECK-NEXT:    retq
  %ef = extractelement <4 x float> %x, i32 3
  %ei = bitcast float %ef to i32
  ret i32 %ei
}

define void @test6(<4 x float> %x, ptr %out) nounwind {
; CHECK-LABEL: test6:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vextractps $3, %xmm0, (%rdi)
; CHECK-NEXT:    retq
  %ef = extractelement <4 x float> %x, i32 3
  store float %ef, ptr %out, align 4
  ret void
}

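; test7-test10: variable-index extracts spill the vector to a stack slot
; aligned to the vector size, mask the index to the element count, and reload
; the selected element.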
define float @test7(<16 x float> %x, i32 %ind) nounwind {
; CHECK-LABEL: test7:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    andq $-64, %rsp
; CHECK-NEXT:    subq $128, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %zmm0, (%rsp)
; CHECK-NEXT:    andl $15, %edi
; CHECK-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %e = extractelement <16 x float> %x, i32 %ind
  ret float %e
}

define double @test8(<8 x double> %x, i32 %ind) nounwind {
; CHECK-LABEL: test8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    andq $-64, %rsp
; CHECK-NEXT:    subq $128, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %zmm0, (%rsp)
; CHECK-NEXT:    andl $7, %edi
; CHECK-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %e = extractelement <8 x double> %x, i32 %ind
  ret double %e
}

define float @test9(<8 x float> %x, i32 %ind) nounwind {
; CHECK-LABEL: test9:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    andq $-32, %rsp
; CHECK-NEXT:    subq $64, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %ymm0, (%rsp)
; CHECK-NEXT:    andl $7, %edi
; CHECK-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %e = extractelement <8 x float> %x, i32 %ind
  ret float %e
}

define i32 @test10(<16 x i32> %x, i32 %ind) nounwind {
; CHECK-LABEL: test10:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    andq $-64, %rsp
; CHECK-NEXT:    subq $128, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %zmm0, (%rsp)
; CHECK-NEXT:    andl $15, %edi
; CHECK-NEXT:    movl (%rsp,%rdi,4), %eax
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %e = extractelement <16 x i32> %x, i32 %ind
  ret i32 %e
}

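; test11/test12/test14: branch or select on a single bit of a compare mask; the
; relevant bit is shifted down in the k-register if needed, moved to a GPR, and
; tested with testb.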
define <16 x i32> @test11(<16 x i32> %a, <16 x i32> %b) {
; KNL-LABEL: test11:
; KNL:       ## %bb.0:
; KNL-NEXT:    vpcmpltud %zmm1, %zmm0, %k0
; KNL-NEXT:    kshiftrw $4, %k0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    testb $1, %al
; KNL-NEXT:    je LBB10_2
; KNL-NEXT:  ## %bb.1: ## %A
; KNL-NEXT:    vmovdqa64 %zmm1, %zmm0
; KNL-NEXT:    retq
; KNL-NEXT:  LBB10_2: ## %B
; KNL-NEXT:    vpaddd %zmm0, %zmm1, %zmm0
; KNL-NEXT:    retq
;
; SKX-LABEL: test11:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpcmpltud %zmm1, %zmm0, %k0
; SKX-NEXT:    kshiftrw $4, %k0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    testb $1, %al
; SKX-NEXT:    je LBB10_2
; SKX-NEXT:  ## %bb.1: ## %A
; SKX-NEXT:    vmovdqa64 %zmm1, %zmm0
; SKX-NEXT:    retq
; SKX-NEXT:  LBB10_2: ## %B
; SKX-NEXT:    vpaddd %zmm0, %zmm1, %zmm0
; SKX-NEXT:    retq
  %cmp_res = icmp ult <16 x i32> %a, %b
  %ia = extractelement <16 x i1> %cmp_res, i32 4
  br i1 %ia, label %A, label %B
A:
  ret <16 x i32> %b
B:
  %c = add <16 x i32> %b, %a
  ret <16 x i32> %c
}

define i64 @test12(<16 x i64> %a, <16 x i64> %b, i64 %a1, i64 %b1) {
; KNL-LABEL: test12:
; KNL:       ## %bb.0:
; KNL-NEXT:    movq %rdi, %rax
; KNL-NEXT:    vpcmpgtq %zmm0, %zmm2, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    testb $1, %cl
; KNL-NEXT:    cmoveq %rsi, %rax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test12:
; SKX:       ## %bb.0:
; SKX-NEXT:    movq %rdi, %rax
; SKX-NEXT:    vpcmpgtq %zmm0, %zmm2, %k0
; SKX-NEXT:    kmovd %k0, %ecx
; SKX-NEXT:    testb $1, %cl
; SKX-NEXT:    cmoveq %rsi, %rax
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %cmpvector_func.i = icmp slt <16 x i64> %a, %b
  %extract24vector_func.i = extractelement <16 x i1> %cmpvector_func.i, i32 0
  %res = select i1 %extract24vector_func.i, i64 %a1, i64 %b1
  ret i64 %res
}

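; test13: insert the result of a scalar compare into bit 0 of a constant
; <16 x i1> mask, lowered with k-register shifts and an OR.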
define i16 @test13(i32 %a, i32 %b) {
; KNL-LABEL: test13:
; KNL:       ## %bb.0:
; KNL-NEXT:    cmpl %esi, %edi
; KNL-NEXT:    setb %al
; KNL-NEXT:    movw $-4, %cx
; KNL-NEXT:    kmovw %ecx, %k0
; KNL-NEXT:    kshiftrw $1, %k0, %k0
; KNL-NEXT:    kshiftlw $1, %k0, %k0
; KNL-NEXT:    andl $1, %eax
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    korw %k1, %k0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    ## kill: def $ax killed $ax killed $eax
; KNL-NEXT:    retq
;
; SKX-LABEL: test13:
; SKX:       ## %bb.0:
; SKX-NEXT:    cmpl %esi, %edi
; SKX-NEXT:    setb %al
; SKX-NEXT:    movw $-4, %cx
; SKX-NEXT:    kmovd %ecx, %k0
; SKX-NEXT:    kshiftrw $1, %k0, %k0
; SKX-NEXT:    kshiftlw $1, %k0, %k0
; SKX-NEXT:    andl $1, %eax
; SKX-NEXT:    kmovw %eax, %k1
; SKX-NEXT:    korw %k1, %k0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    ## kill: def $ax killed $ax killed $eax
; SKX-NEXT:    retq
  %cmp_res = icmp ult i32 %a, %b
  %maskv = insertelement <16 x i1> <i1 true, i1 false, i1 true, i1 true, i1 true, i1 true, i1 true, i1 true, i1 true, i1 true, i1 true, i1 true, i1 true, i1 true, i1 true, i1 true>, i1 %cmp_res, i32 0
  %res = bitcast <16 x i1> %maskv to i16
  ret i16 %res
}

define i64 @test14(<8 x i64> %a, <8 x i64> %b, i64 %a1, i64 %b1) {
; KNL-LABEL: test14:
; KNL:       ## %bb.0:
; KNL-NEXT:    movq %rdi, %rax
; KNL-NEXT:    vpcmpgtq %zmm0, %zmm1, %k0
; KNL-NEXT:    kshiftrw $4, %k0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    testb $1, %cl
; KNL-NEXT:    cmoveq %rsi, %rax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test14:
; SKX:       ## %bb.0:
; SKX-NEXT:    movq %rdi, %rax
; SKX-NEXT:    vpcmpgtq %zmm0, %zmm1, %k0
; SKX-NEXT:    kshiftrb $4, %k0, %k0
; SKX-NEXT:    kmovd %k0, %ecx
; SKX-NEXT:    testb $1, %cl
; SKX-NEXT:    cmoveq %rsi, %rax
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %cmpvector_func.i = icmp slt <8 x i64> %a, %b
  %extract24vector_func.i = extractelement <8 x i1> %cmpvector_func.i, i32 4
  %res = select i1 %extract24vector_func.i, i64 %a1, i64 %b1
  ret i64 %res
}

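; test15-test17: insert an i1 loaded from memory into a mask. test15 inserts
; into undef, so the result collapses to a select between 0 and 0xFFFF;
; test16/test17 clear the target bit of the bitcast mask, shift the loaded bit
; into position, and OR it in.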
define i16 @test15(ptr %addr) {
; CHECK-LABEL: test15:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    xorl %ecx, %ecx
; CHECK-NEXT:    cmpb $0, (%rdi)
; CHECK-NEXT:    movl $65535, %eax ## imm = 0xFFFF
; CHECK-NEXT:    cmovel %ecx, %eax
; CHECK-NEXT:    ## kill: def $ax killed $ax killed $eax
; CHECK-NEXT:    retq
  %x = load i1, ptr %addr, align 1
  %x1 = insertelement <16 x i1> undef, i1 %x, i32 10
  %x2 = bitcast <16 x i1> %x1 to i16
  ret i16 %x2
}

define i16 @test16(ptr %addr, i16 %a) {
; KNL-LABEL: test16:
; KNL:       ## %bb.0:
; KNL-NEXT:    movzbl (%rdi), %eax
; KNL-NEXT:    kmovw %esi, %k0
; KNL-NEXT:    movw $-1025, %cx ## imm = 0xFBFF
; KNL-NEXT:    kmovw %ecx, %k1
; KNL-NEXT:    kandw %k1, %k0, %k0
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    kshiftlw $15, %k1, %k1
; KNL-NEXT:    kshiftrw $5, %k1, %k1
; KNL-NEXT:    korw %k1, %k0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    ## kill: def $ax killed $ax killed $eax
; KNL-NEXT:    retq
;
; SKX-LABEL: test16:
; SKX:       ## %bb.0:
; SKX-NEXT:    kmovb (%rdi), %k0
; SKX-NEXT:    kmovd %esi, %k1
; SKX-NEXT:    movw $-1025, %ax ## imm = 0xFBFF
; SKX-NEXT:    kmovd %eax, %k2
; SKX-NEXT:    kandw %k2, %k1, %k1
; SKX-NEXT:    kshiftlw $15, %k0, %k0
; SKX-NEXT:    kshiftrw $5, %k0, %k0
; SKX-NEXT:    korw %k0, %k1, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    ## kill: def $ax killed $ax killed $eax
; SKX-NEXT:    retq
  %x = load i1, ptr %addr, align 128
  %a1 = bitcast i16 %a to <16 x i1>
  %x1 = insertelement <16 x i1> %a1, i1 %x, i32 10
  %x2 = bitcast <16 x i1> %x1 to i16
  ret i16 %x2
}

define i8 @test17(ptr %addr, i8 %a) {
; KNL-LABEL: test17:
; KNL:       ## %bb.0:
; KNL-NEXT:    movzbl (%rdi), %eax
; KNL-NEXT:    kmovw %esi, %k0
; KNL-NEXT:    movw $-17, %cx
; KNL-NEXT:    kmovw %ecx, %k1
; KNL-NEXT:    kandw %k1, %k0, %k0
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    kshiftlw $15, %k1, %k1
; KNL-NEXT:    kshiftrw $11, %k1, %k1
; KNL-NEXT:    korw %k1, %k0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    ## kill: def $al killed $al killed $eax
; KNL-NEXT:    retq
;
; SKX-LABEL: test17:
; SKX:       ## %bb.0:
; SKX-NEXT:    kmovb (%rdi), %k0
; SKX-NEXT:    kmovd %esi, %k1
; SKX-NEXT:    movb $-17, %al
; SKX-NEXT:    kmovd %eax, %k2
; SKX-NEXT:    kandb %k2, %k1, %k1
; SKX-NEXT:    kshiftlb $7, %k0, %k0
; SKX-NEXT:    kshiftrb $3, %k0, %k0
; SKX-NEXT:    korb %k0, %k1, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    ## kill: def $al killed $al killed $eax
; SKX-NEXT:    retq
  %x = load i1, ptr %addr, align 128
  %a1 = bitcast i8 %a to <8 x i1>
  %x1 = insertelement <8 x i1> %a1, i1 %x, i32 4
  %x2 = bitcast <8 x i1> %x1 to i8
  ret i8 %x2
}

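; Constant-index extracts of integer elements: one lane is returned in a
; register and another is stored straight to memory. Lanes above bit 128 are
; reached through a 128-bit lane extract first.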
define i64 @extract_v8i64(<8 x i64> %x, ptr %dst) {
; CHECK-LABEL: extract_v8i64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpextrq $1, %xmm0, %rax
; CHECK-NEXT:    vextracti128 $1, %ymm0, %xmm0
; CHECK-NEXT:    vpextrq $1, %xmm0, (%rdi)
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %r1 = extractelement <8 x i64> %x, i32 1
  %r2 = extractelement <8 x i64> %x, i32 3
  store i64 %r2, ptr %dst, align 1
  ret i64 %r1
}

define i64 @extract_v4i64(<4 x i64> %x, ptr %dst) {
; CHECK-LABEL: extract_v4i64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpextrq $1, %xmm0, %rax
; CHECK-NEXT:    vextracti128 $1, %ymm0, %xmm0
; CHECK-NEXT:    vpextrq $1, %xmm0, (%rdi)
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %r1 = extractelement <4 x i64> %x, i32 1
  %r2 = extractelement <4 x i64> %x, i32 3
  store i64 %r2, ptr %dst, align 1
  ret i64 %r1
}

define i64 @extract_v2i64(<2 x i64> %x, ptr %dst) {
; CHECK-LABEL: extract_v2i64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vmovq %xmm0, %rax
; CHECK-NEXT:    vpextrq $1, %xmm0, (%rdi)
; CHECK-NEXT:    retq
  %r1 = extractelement <2 x i64> %x, i32 0
  %r2 = extractelement <2 x i64> %x, i32 1
  store i64 %r2, ptr %dst, align 1
  ret i64 %r1
}

define i32 @extract_v16i32(<16 x i32> %x, ptr %dst) {
; CHECK-LABEL: extract_v16i32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vextractps $1, %xmm0, %eax
; CHECK-NEXT:    vextractf128 $1, %ymm0, %xmm0
; CHECK-NEXT:    vextractps $1, %xmm0, (%rdi)
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %r1 = extractelement <16 x i32> %x, i32 1
  %r2 = extractelement <16 x i32> %x, i32 5
  store i32 %r2, ptr %dst, align 1
  ret i32 %r1
}

define i32 @extract_v8i32(<8 x i32> %x, ptr %dst) {
; CHECK-LABEL: extract_v8i32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vextractps $1, %xmm0, %eax
; CHECK-NEXT:    vextractf128 $1, %ymm0, %xmm0
; CHECK-NEXT:    vextractps $1, %xmm0, (%rdi)
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %r1 = extractelement <8 x i32> %x, i32 1
  %r2 = extractelement <8 x i32> %x, i32 5
  store i32 %r2, ptr %dst, align 1
  ret i32 %r1
}

define i32 @extract_v4i32(<4 x i32> %x, ptr %dst) {
; CHECK-LABEL: extract_v4i32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vextractps $1, %xmm0, %eax
; CHECK-NEXT:    vextractps $3, %xmm0, (%rdi)
; CHECK-NEXT:    retq
  %r1 = extractelement <4 x i32> %x, i32 1
  %r2 = extractelement <4 x i32> %x, i32 3
  store i32 %r2, ptr %dst, align 1
  ret i32 %r1
}

define i16 @extract_v32i16(<32 x i16> %x, ptr %dst) {
; CHECK-LABEL: extract_v32i16:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpextrw $1, %xmm0, %eax
; CHECK-NEXT:    vextracti128 $1, %ymm0, %xmm0
; CHECK-NEXT:    vpextrw $1, %xmm0, (%rdi)
; CHECK-NEXT:    ## kill: def $ax killed $ax killed $eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %r1 = extractelement <32 x i16> %x, i32 1
  %r2 = extractelement <32 x i16> %x, i32 9
  store i16 %r2, ptr %dst, align 1
  ret i16 %r1
}

define i16 @extract_v16i16(<16 x i16> %x, ptr %dst) {
; CHECK-LABEL: extract_v16i16:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpextrw $1, %xmm0, %eax
; CHECK-NEXT:    vextracti128 $1, %ymm0, %xmm0
; CHECK-NEXT:    vpextrw $1, %xmm0, (%rdi)
; CHECK-NEXT:    ## kill: def $ax killed $ax killed $eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %r1 = extractelement <16 x i16> %x, i32 1
  %r2 = extractelement <16 x i16> %x, i32 9
  store i16 %r2, ptr %dst, align 1
  ret i16 %r1
}

define i16 @extract_v8i16(<8 x i16> %x, ptr %dst) {
; CHECK-LABEL: extract_v8i16:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpextrw $1, %xmm0, %eax
; CHECK-NEXT:    vpextrw $3, %xmm0, (%rdi)
; CHECK-NEXT:    ## kill: def $ax killed $ax killed $eax
; CHECK-NEXT:    retq
  %r1 = extractelement <8 x i16> %x, i32 1
  %r2 = extractelement <8 x i16> %x, i32 3
  store i16 %r2, ptr %dst, align 1
  ret i16 %r1
}

define i8 @extract_v64i8(<64 x i8> %x, ptr %dst) {
; CHECK-LABEL: extract_v64i8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpextrb $1, %xmm0, %eax
; CHECK-NEXT:    vextracti128 $1, %ymm0, %xmm0
; CHECK-NEXT:    vpextrb $1, %xmm0, (%rdi)
; CHECK-NEXT:    ## kill: def $al killed $al killed $eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %r1 = extractelement <64 x i8> %x, i32 1
  %r2 = extractelement <64 x i8> %x, i32 17
  store i8 %r2, ptr %dst, align 1
  ret i8 %r1
}

define i8 @extract_v32i8(<32 x i8> %x, ptr %dst) {
; CHECK-LABEL: extract_v32i8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpextrb $1, %xmm0, %eax
; CHECK-NEXT:    vextracti128 $1, %ymm0, %xmm0
; CHECK-NEXT:    vpextrb $1, %xmm0, (%rdi)
; CHECK-NEXT:    ## kill: def $al killed $al killed $eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %r1 = extractelement <32 x i8> %x, i32 1
  %r2 = extractelement <32 x i8> %x, i32 17
  store i8 %r2, ptr %dst, align 1
  ret i8 %r1
}

define i8 @extract_v16i8(<16 x i8> %x, ptr %dst) {
; CHECK-LABEL: extract_v16i8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpextrb $1, %xmm0, %eax
; CHECK-NEXT:    vpextrb $3, %xmm0, (%rdi)
; CHECK-NEXT:    ## kill: def $al killed $al killed $eax
; CHECK-NEXT:    retq
  %r1 = extractelement <16 x i8> %x, i32 1
  %r2 = extractelement <16 x i8> %x, i32 3
  store i8 %r2, ptr %dst, align 1
  ret i8 %r1
}

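; Constant-index inserts of one element loaded from memory and one passed in a
; GPR. Elements in the low 128 bits go through vpinsr* on the low lane;
; elements above bit 128 use a masked broadcast, a broadcast+blend, or a
; 128-bit lane update, depending on the type and subtarget.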
define <8 x i64> @insert_v8i64(<8 x i64> %x, i64 %y, ptr %ptr) {
; KNL-LABEL: insert_v8i64:
; KNL:       ## %bb.0:
; KNL-NEXT:    vpinsrq $1, (%rsi), %xmm0, %xmm1
; KNL-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; KNL-NEXT:    movb $8, %al
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    vpbroadcastq %rdi, %zmm0 {%k1}
; KNL-NEXT:    retq
;
; SKX-LABEL: insert_v8i64:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpinsrq $1, (%rsi), %xmm0, %xmm1
; SKX-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; SKX-NEXT:    movb $8, %al
; SKX-NEXT:    kmovd %eax, %k1
; SKX-NEXT:    vpbroadcastq %rdi, %zmm0 {%k1}
; SKX-NEXT:    retq
  %val = load i64, ptr %ptr
  %r1 = insertelement <8 x i64> %x, i64 %val, i32 1
  %r2 = insertelement <8 x i64> %r1, i64 %y, i32 3
  ret <8 x i64> %r2
}

define <4 x i64> @insert_v4i64(<4 x i64> %x, i64 %y, ptr %ptr) {
; KNL-LABEL: insert_v4i64:
; KNL:       ## %bb.0:
; KNL-NEXT:    vpinsrq $1, (%rsi), %xmm0, %xmm1
; KNL-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
; KNL-NEXT:    vmovq %rdi, %xmm1
; KNL-NEXT:    vpbroadcastq %xmm1, %ymm1
; KNL-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; KNL-NEXT:    retq
;
; SKX-LABEL: insert_v4i64:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpinsrq $1, (%rsi), %xmm0, %xmm1
; SKX-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
; SKX-NEXT:    vpbroadcastq %rdi, %ymm1
; SKX-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; SKX-NEXT:    retq
  %val = load i64, ptr %ptr
  %r1 = insertelement <4 x i64> %x, i64 %val, i32 1
  %r2 = insertelement <4 x i64> %r1, i64 %y, i32 3
  ret <4 x i64> %r2
}

define <2 x i64> @insert_v2i64(<2 x i64> %x, i64 %y, ptr %ptr) {
; CHECK-LABEL: insert_v2i64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vmovq {{.*#+}} xmm0 = mem[0],zero
; CHECK-NEXT:    vmovq %rdi, %xmm1
; CHECK-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-NEXT:    retq
  %val = load i64, ptr %ptr
  %r1 = insertelement <2 x i64> %x, i64 %val, i32 1
  %r2 = insertelement <2 x i64> %r1, i64 %y, i32 0
  ret <2 x i64> %r2
}

define <16 x i32> @insert_v16i32(<16 x i32> %x, i32 %y, ptr %ptr) {
; KNL-LABEL: insert_v16i32:
; KNL:       ## %bb.0:
; KNL-NEXT:    vpinsrd $1, (%rsi), %xmm0, %xmm1
; KNL-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; KNL-NEXT:    movw $32, %ax
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    vpbroadcastd %edi, %zmm0 {%k1}
; KNL-NEXT:    retq
;
; SKX-LABEL: insert_v16i32:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpinsrd $1, (%rsi), %xmm0, %xmm1
; SKX-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; SKX-NEXT:    movw $32, %ax
; SKX-NEXT:    kmovd %eax, %k1
; SKX-NEXT:    vpbroadcastd %edi, %zmm0 {%k1}
; SKX-NEXT:    retq
  %val = load i32, ptr %ptr
  %r1 = insertelement <16 x i32> %x, i32 %val, i32 1
  %r2 = insertelement <16 x i32> %r1, i32 %y, i32 5
  ret <16 x i32> %r2
}

define <8 x i32> @insert_v8i32(<8 x i32> %x, i32 %y, ptr %ptr) {
; KNL-LABEL: insert_v8i32:
; KNL:       ## %bb.0:
; KNL-NEXT:    vpinsrd $1, (%rsi), %xmm0, %xmm1
; KNL-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
; KNL-NEXT:    vmovd %edi, %xmm1
; KNL-NEXT:    vpbroadcastd %xmm1, %ymm1
; KNL-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7]
; KNL-NEXT:    retq
;
; SKX-LABEL: insert_v8i32:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpinsrd $1, (%rsi), %xmm0, %xmm1
; SKX-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
; SKX-NEXT:    vpbroadcastd %edi, %ymm1
; SKX-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7]
; SKX-NEXT:    retq
  %val = load i32, ptr %ptr
  %r1 = insertelement <8 x i32> %x, i32 %val, i32 1
  %r2 = insertelement <8 x i32> %r1, i32 %y, i32 5
  ret <8 x i32> %r2
}

define <4 x i32> @insert_v4i32(<4 x i32> %x, i32 %y, ptr %ptr) {
; CHECK-LABEL: insert_v4i32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpinsrd $1, (%rsi), %xmm0, %xmm0
; CHECK-NEXT:    vpinsrd $3, %edi, %xmm0, %xmm0
; CHECK-NEXT:    retq
  %val = load i32, ptr %ptr
  %r1 = insertelement <4 x i32> %x, i32 %val, i32 1
  %r2 = insertelement <4 x i32> %r1, i32 %y, i32 3
  ret <4 x i32> %r2
}

define <32 x i16> @insert_v32i16(<32 x i16> %x, i16 %y, ptr %ptr) {
; KNL-LABEL: insert_v32i16:
; KNL:       ## %bb.0:
; KNL-NEXT:    vpinsrw $1, (%rsi), %xmm0, %xmm1
; KNL-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm1
; KNL-NEXT:    vmovd %edi, %xmm0
; KNL-NEXT:    vpbroadcastw %xmm0, %ymm0
; KNL-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm0
; KNL-NEXT:    retq
;
; SKX-LABEL: insert_v32i16:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpinsrw $1, (%rsi), %xmm0, %xmm1
; SKX-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; SKX-NEXT:    movl $512, %eax ## imm = 0x200
; SKX-NEXT:    kmovd %eax, %k1
; SKX-NEXT:    vpbroadcastw %edi, %zmm0 {%k1}
; SKX-NEXT:    retq
  %val = load i16, ptr %ptr
  %r1 = insertelement <32 x i16> %x, i16 %val, i32 1
  %r2 = insertelement <32 x i16> %r1, i16 %y, i32 9
  ret <32 x i16> %r2
}

define <16 x i16> @insert_v16i16(<16 x i16> %x, i16 %y, ptr %ptr) {
; KNL-LABEL: insert_v16i16:
; KNL:       ## %bb.0:
; KNL-NEXT:    vpinsrw $1, (%rsi), %xmm0, %xmm1
; KNL-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
; KNL-NEXT:    vmovd %edi, %xmm1
; KNL-NEXT:    vpbroadcastw %xmm1, %ymm1
; KNL-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0],ymm1[1],ymm0[2,3,4,5,6,7,8],ymm1[9],ymm0[10,11,12,13,14,15]
; KNL-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; KNL-NEXT:    retq
;
; SKX-LABEL: insert_v16i16:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpinsrw $1, (%rsi), %xmm0, %xmm1
; SKX-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
; SKX-NEXT:    vpbroadcastw %edi, %ymm1
; SKX-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0],ymm1[1],ymm0[2,3,4,5,6,7,8],ymm1[9],ymm0[10,11,12,13,14,15]
; SKX-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; SKX-NEXT:    retq
  %val = load i16, ptr %ptr
  %r1 = insertelement <16 x i16> %x, i16 %val, i32 1
  %r2 = insertelement <16 x i16> %r1, i16 %y, i32 9
  ret <16 x i16> %r2
}

define <8 x i16> @insert_v8i16(<8 x i16> %x, i16 %y, ptr %ptr) {
; CHECK-LABEL: insert_v8i16:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpinsrw $1, (%rsi), %xmm0, %xmm0
; CHECK-NEXT:    vpinsrw $5, %edi, %xmm0, %xmm0
; CHECK-NEXT:    retq
  %val = load i16, ptr %ptr
  %r1 = insertelement <8 x i16> %x, i16 %val, i32 1
  %r2 = insertelement <8 x i16> %r1, i16 %y, i32 5
  ret <8 x i16> %r2
}

define <64 x i8> @insert_v64i8(<64 x i8> %x, i8 %y, ptr %ptr) {
; CHECK-LABEL: insert_v64i8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpinsrb $1, (%rsi), %xmm0, %xmm1
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm1
; CHECK-NEXT:    vextracti32x4 $3, %zmm0, %xmm0
; CHECK-NEXT:    vpinsrb $2, %edi, %xmm0, %xmm0
; CHECK-NEXT:    vinserti32x4 $3, %xmm0, %zmm1, %zmm0
; CHECK-NEXT:    retq
  %val = load i8, ptr %ptr
  %r1 = insertelement <64 x i8> %x, i8 %val, i32 1
  %r2 = insertelement <64 x i8> %r1, i8 %y, i32 50
  ret <64 x i8> %r2
}

define <32 x i8> @insert_v32i8(<32 x i8> %x, i8 %y, ptr %ptr) {
; CHECK-LABEL: insert_v32i8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpinsrb $1, (%rsi), %xmm0, %xmm1
; CHECK-NEXT:    vextracti128 $1, %ymm0, %xmm0
; CHECK-NEXT:    vpinsrb $1, %edi, %xmm0, %xmm0
; CHECK-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; CHECK-NEXT:    retq
  %val = load i8, ptr %ptr
  %r1 = insertelement <32 x i8> %x, i8 %val, i32 1
  %r2 = insertelement <32 x i8> %r1, i8 %y, i32 17
  ret <32 x i8> %r2
}

define <16 x i8> @insert_v16i8(<16 x i8> %x, i8 %y, ptr %ptr) {
; CHECK-LABEL: insert_v16i8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpinsrb $3, (%rsi), %xmm0, %xmm0
; CHECK-NEXT:    vpinsrb $10, %edi, %xmm0, %xmm0
; CHECK-NEXT:    retq
  %val = load i8, ptr %ptr
  %r1 = insertelement <16 x i8> %x, i8 %val, i32 3
  %r2 = insertelement <16 x i8> %r1, i8 %y, i32 10
  ret <16 x i8> %r2
}

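; Single-element inserts at a constant index. Indices in the low 128 bits stay
; in xmm and are put back with vinsert*32x4 $0; the v16i16/v32i8 cases insert
; above the low 128 bits and need a blend or a 128-bit lane update.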
define <8 x i64> @test_insert_128_v8i64(<8 x i64> %x, i64 %y) {
; CHECK-LABEL: test_insert_128_v8i64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpinsrq $1, %rdi, %xmm0, %xmm1
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    retq
  %r = insertelement <8 x i64> %x, i64 %y, i32 1
  ret <8 x i64> %r
}

define <16 x i32> @test_insert_128_v16i32(<16 x i32> %x, i32 %y) {
; CHECK-LABEL: test_insert_128_v16i32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vpinsrd $1, %edi, %xmm0, %xmm1
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    retq
  %r = insertelement <16 x i32> %x, i32 %y, i32 1
  ret <16 x i32> %r
}

define <8 x double> @test_insert_128_v8f64(<8 x double> %x, double %y) {
; CHECK-LABEL: test_insert_128_v8f64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm1[0]
; CHECK-NEXT:    vinsertf32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    retq
  %r = insertelement <8 x double> %x, double %y, i32 1
  ret <8 x double> %r
}

define <16 x float> @test_insert_128_v16f32(<16 x float> %x, float %y) {
; CHECK-LABEL: test_insert_128_v16f32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vinsertps {{.*#+}} xmm1 = xmm0[0],xmm1[0],xmm0[2,3]
; CHECK-NEXT:    vinsertf32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    retq
  %r = insertelement <16 x float> %x, float %y, i32 1
  ret <16 x float> %r
}

define <16 x i16> @test_insert_128_v16i16(<16 x i16> %x, i16 %y) {
; KNL-LABEL: test_insert_128_v16i16:
; KNL:       ## %bb.0:
; KNL-NEXT:    vmovd %edi, %xmm1
; KNL-NEXT:    vpbroadcastw %xmm1, %ymm1
; KNL-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1],ymm1[2],ymm0[3,4,5,6,7,8,9],ymm1[10],ymm0[11,12,13,14,15]
; KNL-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; KNL-NEXT:    retq
;
; SKX-LABEL: test_insert_128_v16i16:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpbroadcastw %edi, %ymm1
; SKX-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1],ymm1[2],ymm0[3,4,5,6,7,8,9],ymm1[10],ymm0[11,12,13,14,15]
; SKX-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; SKX-NEXT:    retq
  %r = insertelement <16 x i16> %x, i16 %y, i32 10
  ret <16 x i16> %r
}

define <32 x i8> @test_insert_128_v32i8(<32 x i8> %x, i8 %y) {
; CHECK-LABEL: test_insert_128_v32i8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vextracti128 $1, %ymm0, %xmm1
; CHECK-NEXT:    vpinsrb $4, %edi, %xmm1, %xmm1
; CHECK-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; CHECK-NEXT:    retq
  %r = insertelement <32 x i8> %x, i8 %y, i32 20
  ret <32 x i8> %r
}

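; Insert a scalar compare bit into a vector compare mask at a constant index.
; KNL works in 16-bit k-registers and widens narrower masks; SKX has byte and
; dword mask operations.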
define i32 @test_insertelement_v32i1(i32 %a, i32 %b, <32 x i32> %x, <32 x i32> %y) {
; KNL-LABEL: test_insertelement_v32i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    cmpl %esi, %edi
; KNL-NEXT:    setb %al
; KNL-NEXT:    vpcmpltud %zmm3, %zmm1, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    shll $16, %ecx
; KNL-NEXT:    movw $-17, %dx
; KNL-NEXT:    kmovw %edx, %k1
; KNL-NEXT:    vpcmpltud %zmm2, %zmm0, %k0 {%k1}
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    kshiftlw $15, %k1, %k1
; KNL-NEXT:    kshiftrw $11, %k1, %k1
; KNL-NEXT:    korw %k1, %k0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    orl %ecx, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_insertelement_v32i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    cmpl %esi, %edi
; SKX-NEXT:    setb %al
; SKX-NEXT:    vpcmpltud %zmm2, %zmm0, %k0
; SKX-NEXT:    vpcmpltud %zmm3, %zmm1, %k1
; SKX-NEXT:    kunpckwd %k0, %k1, %k0
; SKX-NEXT:    movl $-17, %ecx
; SKX-NEXT:    kmovd %ecx, %k1
; SKX-NEXT:    kandd %k1, %k0, %k0
; SKX-NEXT:    kmovd %eax, %k1
; SKX-NEXT:    kshiftld $31, %k1, %k1
; SKX-NEXT:    kshiftrd $27, %k1, %k1
; SKX-NEXT:    kord %k1, %k0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %cmp_res_i1 = icmp ult i32 %a, %b
  %cmp_cmp_vec = icmp ult <32 x i32> %x, %y
  %maskv = insertelement <32 x i1> %cmp_cmp_vec, i1 %cmp_res_i1, i32 4
  %res = bitcast <32 x i1> %maskv to i32
  ret i32 %res
}

define i8 @test_iinsertelement_v4i1(i32 %a, i32 %b, <4 x i32> %x, <4 x i32> %y) {
; KNL-LABEL: test_iinsertelement_v4i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    ## kill: def $xmm1 killed $xmm1 def $zmm1
; KNL-NEXT:    ## kill: def $xmm0 killed $xmm0 def $zmm0
; KNL-NEXT:    cmpl %esi, %edi
; KNL-NEXT:    setb %al
; KNL-NEXT:    movw $-5, %cx
; KNL-NEXT:    kmovw %ecx, %k1
; KNL-NEXT:    vpcmpltud %zmm1, %zmm0, %k0 {%k1}
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    kshiftlw $15, %k1, %k1
; KNL-NEXT:    kshiftrw $13, %k1, %k1
; KNL-NEXT:    korw %k1, %k0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    ## kill: def $al killed $al killed $eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_iinsertelement_v4i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    cmpl %esi, %edi
; SKX-NEXT:    setb %al
; SKX-NEXT:    movb $-5, %cl
; SKX-NEXT:    kmovd %ecx, %k1
; SKX-NEXT:    vpcmpltud %xmm1, %xmm0, %k0 {%k1}
; SKX-NEXT:    kmovd %eax, %k1
; SKX-NEXT:    kshiftlb $7, %k1, %k1
; SKX-NEXT:    kshiftrb $5, %k1, %k1
; SKX-NEXT:    korw %k1, %k0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    ## kill: def $al killed $al killed $eax
; SKX-NEXT:    retq
  %cmp_res_i1 = icmp ult i32 %a, %b
  %cmp_cmp_vec = icmp ult <4 x i32> %x, %y
  %maskv = insertelement <4 x i1> %cmp_cmp_vec, i1 %cmp_res_i1, i32 2
  %res0 = shufflevector <4 x i1> %maskv, <4 x i1> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 4, i32 4, i32 4>
  %res = bitcast <8 x i1> %res0 to i8
  ret i8 %res
}

define i8 @test_iinsertelement_v2i1(i32 %a, i32 %b, <2 x i64> %x, <2 x i64> %y) {
; KNL-LABEL: test_iinsertelement_v2i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    ## kill: def $xmm1 killed $xmm1 def $zmm1
; KNL-NEXT:    ## kill: def $xmm0 killed $xmm0 def $zmm0
; KNL-NEXT:    cmpl %esi, %edi
; KNL-NEXT:    setb %al
; KNL-NEXT:    vpcmpltuq %zmm1, %zmm0, %k0
; KNL-NEXT:    kshiftlw $15, %k0, %k0
; KNL-NEXT:    kshiftrw $15, %k0, %k0
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    kshiftlw $1, %k1, %k1
; KNL-NEXT:    korw %k1, %k0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    ## kill: def $al killed $al killed $eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_iinsertelement_v2i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    cmpl %esi, %edi
; SKX-NEXT:    setb %al
; SKX-NEXT:    vpcmpltuq %xmm1, %xmm0, %k0
; SKX-NEXT:    kshiftlb $7, %k0, %k0
; SKX-NEXT:    kshiftrb $7, %k0, %k0
; SKX-NEXT:    kmovd %eax, %k1
; SKX-NEXT:    kshiftlb $1, %k1, %k1
; SKX-NEXT:    korw %k1, %k0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    ## kill: def $al killed $al killed $eax
; SKX-NEXT:    retq
  %cmp_res_i1 = icmp ult i32 %a, %b
  %cmp_cmp_vec = icmp ult <2 x i64> %x, %y
  %maskv = insertelement <2 x i1> %cmp_cmp_vec, i1 %cmp_res_i1, i32 1
  %res0 = shufflevector <2 x i1> %maskv, <2 x i1> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2>
  %res = bitcast <8 x i1> %res0 to i8
  ret i8 %res
}

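; Extract an i1 at a constant index from a compare mask and zero-extend it or
; select on it. KNL has no byte-element mask compares, so the <32 x i8> and
; <64 x i8> sources go through vector compares and vptestmd. The *_alt variants
; express select (x ? 3 : 4) as sext plus add 4.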
define zeroext i8 @test_extractelement_v2i1(<2 x i64> %a, <2 x i64> %b) {
; KNL-LABEL: test_extractelement_v2i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    ## kill: def $xmm1 killed $xmm1 def $zmm1
; KNL-NEXT:    ## kill: def $xmm0 killed $xmm0 def $zmm0
; KNL-NEXT:    vpcmpnleuq %zmm1, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    andl $1, %ecx
; KNL-NEXT:    movl $4, %eax
; KNL-NEXT:    subl %ecx, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_extractelement_v2i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpcmpnleuq %xmm1, %xmm0, %k0
; SKX-NEXT:    kmovd %k0, %ecx
; SKX-NEXT:    andl $1, %ecx
; SKX-NEXT:    movl $4, %eax
; SKX-NEXT:    subl %ecx, %eax
; SKX-NEXT:    retq
  %t1 = icmp ugt <2 x i64> %a, %b
  %t2 = extractelement <2 x i1> %t1, i32 0
  %res = select i1 %t2, i8 3, i8 4
  ret i8 %res
}

define zeroext i8 @extractelement_v2i1_alt(<2 x i64> %a, <2 x i64> %b) {
; KNL-LABEL: extractelement_v2i1_alt:
; KNL:       ## %bb.0:
; KNL-NEXT:    ## kill: def $xmm1 killed $xmm1 def $zmm1
; KNL-NEXT:    ## kill: def $xmm0 killed $xmm0 def $zmm0
; KNL-NEXT:    vpcmpnleuq %zmm1, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    andb $1, %al
; KNL-NEXT:    movb $4, %cl
; KNL-NEXT:    subb %al, %cl
; KNL-NEXT:    movzbl %cl, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: extractelement_v2i1_alt:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpcmpnleuq %xmm1, %xmm0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    andb $1, %al
; SKX-NEXT:    movb $4, %cl
; SKX-NEXT:    subb %al, %cl
; SKX-NEXT:    movzbl %cl, %eax
; SKX-NEXT:    retq
  %t1 = icmp ugt <2 x i64> %a, %b
  %t2 = extractelement <2 x i1> %t1, i32 0
  %sext = sext i1 %t2 to i8
  %res = add i8 %sext, 4
  ret i8 %res
}

define zeroext i8 @test_extractelement_v4i1(<4 x i32> %a, <4 x i32> %b) {
; KNL-LABEL: test_extractelement_v4i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    ## kill: def $xmm1 killed $xmm1 def $zmm1
; KNL-NEXT:    ## kill: def $xmm0 killed $xmm0 def $zmm0
; KNL-NEXT:    vpcmpnleud %zmm1, %zmm0, %k0
; KNL-NEXT:    kshiftrw $3, %k0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    andl $1, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_extractelement_v4i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpcmpnleud %xmm1, %xmm0, %k0
; SKX-NEXT:    kshiftrb $3, %k0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    andl $1, %eax
; SKX-NEXT:    retq
  %t1 = icmp ugt <4 x i32> %a, %b
  %t2 = extractelement <4 x i1> %t1, i32 3
  %res = zext i1 %t2 to i8
  ret i8 %res
}

define zeroext i8 @test_extractelement_v32i1(<32 x i8> %a, <32 x i8> %b) {
; KNL-LABEL: test_extractelement_v32i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    vpminub %xmm1, %xmm0, %xmm1
; KNL-NEXT:    vpcmpeqb %xmm1, %xmm0, %xmm0
; KNL-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
; KNL-NEXT:    vpmovsxbd %xmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kshiftrw $2, %k0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    andl $1, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_extractelement_v32i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpcmpnleub %ymm1, %ymm0, %k0
; SKX-NEXT:    kshiftrd $2, %k0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    andl $1, %eax
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %t1 = icmp ugt <32 x i8> %a, %b
  %t2 = extractelement <32 x i1> %t1, i32 2
  %res = zext i1 %t2 to i8
  ret i8 %res
}

define zeroext i8 @test_extractelement_v64i1(<64 x i8> %a, <64 x i8> %b) {
; KNL-LABEL: test_extractelement_v64i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    vextracti64x4 $1, %zmm1, %ymm1
; KNL-NEXT:    vextracti128 $1, %ymm1, %xmm1
; KNL-NEXT:    vextracti64x4 $1, %zmm0, %ymm0
; KNL-NEXT:    vextracti128 $1, %ymm0, %xmm0
; KNL-NEXT:    vpminub %xmm1, %xmm0, %xmm1
; KNL-NEXT:    vpcmpeqb %xmm1, %xmm0, %xmm0
; KNL-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
; KNL-NEXT:    vpmovsxbd %xmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kshiftrw $15, %k0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    andl $1, %ecx
; KNL-NEXT:    movl $4, %eax
; KNL-NEXT:    subl %ecx, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_extractelement_v64i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpcmpnleub %zmm1, %zmm0, %k0
; SKX-NEXT:    kshiftrq $63, %k0, %k0
; SKX-NEXT:    kmovd %k0, %ecx
; SKX-NEXT:    andl $1, %ecx
; SKX-NEXT:    movl $4, %eax
; SKX-NEXT:    subl %ecx, %eax
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %t1 = icmp ugt <64 x i8> %a, %b
  %t2 = extractelement <64 x i1> %t1, i32 63
  %res = select i1 %t2, i8 3, i8 4
  ret i8 %res
}

define zeroext i8 @extractelement_v64i1_alt(<64 x i8> %a, <64 x i8> %b) {
; KNL-LABEL: extractelement_v64i1_alt:
; KNL:       ## %bb.0:
; KNL-NEXT:    vextracti64x4 $1, %zmm1, %ymm1
; KNL-NEXT:    vextracti128 $1, %ymm1, %xmm1
; KNL-NEXT:    vextracti64x4 $1, %zmm0, %ymm0
; KNL-NEXT:    vextracti128 $1, %ymm0, %xmm0
; KNL-NEXT:    vpminub %xmm1, %xmm0, %xmm1
; KNL-NEXT:    vpcmpeqb %xmm1, %xmm0, %xmm0
; KNL-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
; KNL-NEXT:    vpmovsxbd %xmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kshiftrw $15, %k0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    andb $1, %al
; KNL-NEXT:    movb $4, %cl
; KNL-NEXT:    subb %al, %cl
; KNL-NEXT:    movzbl %cl, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: extractelement_v64i1_alt:
; SKX:       ## %bb.0:
; SKX-NEXT:    vpcmpnleub %zmm1, %zmm0, %k0
; SKX-NEXT:    kshiftrq $63, %k0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    andb $1, %al
; SKX-NEXT:    movb $4, %cl
; SKX-NEXT:    subb %al, %cl
; SKX-NEXT:    movzbl %cl, %eax
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %t1 = icmp ugt <64 x i8> %a, %b
  %t2 = extractelement <64 x i1> %t1, i32 63
  %sext = sext i1 %t2 to i8
  %res = add i8 %sext, 4
  ret i8 %res
}

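; Variable-index extracts of ordinary (non-mask) elements: 128-bit vectors are
; spilled to the red zone below %rsp, while 256- and 512-bit vectors realign
; the stack first; the masked index then selects the element in memory.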
define i64 @test_extractelement_variable_v2i64(<2 x i64> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v2i64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %xmm0, -{{[0-9]+}}(%rsp)
; CHECK-NEXT:    andl $1, %edi
; CHECK-NEXT:    movq -24(%rsp,%rdi,8), %rax
; CHECK-NEXT:    retq
  %t2 = extractelement <2 x i64> %t1, i32 %index
  ret i64 %t2
}

define i64 @test_extractelement_variable_v4i64(<4 x i64> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v4i64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-32, %rsp
; CHECK-NEXT:    subq $64, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %ymm0, (%rsp)
; CHECK-NEXT:    andl $3, %edi
; CHECK-NEXT:    movq (%rsp,%rdi,8), %rax
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %t2 = extractelement <4 x i64> %t1, i32 %index
  ret i64 %t2
}

define i64 @test_extractelement_variable_v8i64(<8 x i64> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v8i64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-64, %rsp
; CHECK-NEXT:    subq $128, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %zmm0, (%rsp)
; CHECK-NEXT:    andl $7, %edi
; CHECK-NEXT:    movq (%rsp,%rdi,8), %rax
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %t2 = extractelement <8 x i64> %t1, i32 %index
  ret i64 %t2
}

define double @test_extractelement_variable_v2f64(<2 x double> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v2f64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %xmm0, -{{[0-9]+}}(%rsp)
; CHECK-NEXT:    andl $1, %edi
; CHECK-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; CHECK-NEXT:    retq
  %t2 = extractelement <2 x double> %t1, i32 %index
  ret double %t2
}

define double @test_extractelement_variable_v4f64(<4 x double> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v4f64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-32, %rsp
; CHECK-NEXT:    subq $64, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %ymm0, (%rsp)
; CHECK-NEXT:    andl $3, %edi
; CHECK-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %t2 = extractelement <4 x double> %t1, i32 %index
  ret double %t2
}

define double @test_extractelement_variable_v8f64(<8 x double> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v8f64:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-64, %rsp
; CHECK-NEXT:    subq $128, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %zmm0, (%rsp)
; CHECK-NEXT:    andl $7, %edi
; CHECK-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %t2 = extractelement <8 x double> %t1, i32 %index
  ret double %t2
}

define i32 @test_extractelement_variable_v4i32(<4 x i32> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v4i32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %xmm0, -{{[0-9]+}}(%rsp)
; CHECK-NEXT:    andl $3, %edi
; CHECK-NEXT:    movl -24(%rsp,%rdi,4), %eax
; CHECK-NEXT:    retq
  %t2 = extractelement <4 x i32> %t1, i32 %index
  ret i32 %t2
}

define i32 @test_extractelement_variable_v8i32(<8 x i32> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v8i32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-32, %rsp
; CHECK-NEXT:    subq $64, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %ymm0, (%rsp)
; CHECK-NEXT:    andl $7, %edi
; CHECK-NEXT:    movl (%rsp,%rdi,4), %eax
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %t2 = extractelement <8 x i32> %t1, i32 %index
  ret i32 %t2
}

define i32 @test_extractelement_variable_v16i32(<16 x i32> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v16i32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-64, %rsp
; CHECK-NEXT:    subq $128, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %zmm0, (%rsp)
; CHECK-NEXT:    andl $15, %edi
; CHECK-NEXT:    movl (%rsp,%rdi,4), %eax
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %t2 = extractelement <16 x i32> %t1, i32 %index
  ret i32 %t2
}

define float @test_extractelement_variable_v4f32(<4 x float> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v4f32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %xmm0, -{{[0-9]+}}(%rsp)
; CHECK-NEXT:    andl $3, %edi
; CHECK-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; CHECK-NEXT:    retq
  %t2 = extractelement <4 x float> %t1, i32 %index
  ret float %t2
}

define float @test_extractelement_variable_v8f32(<8 x float> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v8f32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-32, %rsp
; CHECK-NEXT:    subq $64, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %ymm0, (%rsp)
; CHECK-NEXT:    andl $7, %edi
; CHECK-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %t2 = extractelement <8 x float> %t1, i32 %index
  ret float %t2
}

define float @test_extractelement_variable_v16f32(<16 x float> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v16f32:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-64, %rsp
; CHECK-NEXT:    subq $128, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %zmm0, (%rsp)
; CHECK-NEXT:    andl $15, %edi
; CHECK-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %t2 = extractelement <16 x float> %t1, i32 %index
  ret float %t2
}

define i16 @test_extractelement_variable_v8i16(<8 x i16> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v8i16:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %xmm0, -{{[0-9]+}}(%rsp)
; CHECK-NEXT:    andl $7, %edi
; CHECK-NEXT:    movzwl -24(%rsp,%rdi,2), %eax
; CHECK-NEXT:    retq
  %t2 = extractelement <8 x i16> %t1, i32 %index
  ret i16 %t2
}

define i16 @test_extractelement_variable_v16i16(<16 x i16> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v16i16:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-32, %rsp
; CHECK-NEXT:    subq $64, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %ymm0, (%rsp)
; CHECK-NEXT:    andl $15, %edi
; CHECK-NEXT:    movzwl (%rsp,%rdi,2), %eax
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %t2 = extractelement <16 x i16> %t1, i32 %index
  ret i16 %t2
}

define i16 @test_extractelement_variable_v32i16(<32 x i16> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v32i16:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-64, %rsp
; CHECK-NEXT:    subq $128, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %zmm0, (%rsp)
; CHECK-NEXT:    andl $31, %edi
; CHECK-NEXT:    movzwl (%rsp,%rdi,2), %eax
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
  %t2 = extractelement <32 x i16> %t1, i32 %index
  ret i16 %t2
}

define i8 @test_extractelement_variable_v16i8(<16 x i8> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v16i8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %xmm0, -{{[0-9]+}}(%rsp)
; CHECK-NEXT:    andl $15, %edi
; CHECK-NEXT:    movzbl -24(%rsp,%rdi), %eax
; CHECK-NEXT:    retq
  %t2 = extractelement <16 x i8> %t1, i32 %index
  ret i8 %t2
}

define i8 @test_extractelement_variable_v32i8(<32 x i8> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v32i8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-32, %rsp
; CHECK-NEXT:    subq $64, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %ymm0, (%rsp)
; CHECK-NEXT:    andl $31, %edi
; CHECK-NEXT:    movzbl (%rsp,%rdi), %eax
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq

  %t2 = extractelement <32 x i8> %t1, i32 %index
  ret i8 %t2
}

define i8 @test_extractelement_variable_v64i8(<64 x i8> %t1, i32 %index) {
; CHECK-LABEL: test_extractelement_variable_v64i8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-64, %rsp
; CHECK-NEXT:    subq $128, %rsp
; CHECK-NEXT:    ## kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    vmovaps %zmm0, (%rsp)
; CHECK-NEXT:    andl $63, %edi
; CHECK-NEXT:    movzbl (%rsp,%rdi), %eax
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq

  %t2 = extractelement <64 x i8> %t1, i32 %index
  ret i8 %t2
}

define i8 @test_extractelement_variable_v64i8_indexi8(<64 x i8> %t1, i8 %index) {
; CHECK-LABEL: test_extractelement_variable_v64i8_indexi8:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %rbp, -16
; CHECK-NEXT:    movq %rsp, %rbp
; CHECK-NEXT:    .cfi_def_cfa_register %rbp
; CHECK-NEXT:    andq $-64, %rsp
; CHECK-NEXT:    subq $128, %rsp
; CHECK-NEXT:    addb %dil, %dil
; CHECK-NEXT:    vmovaps %zmm0, (%rsp)
; CHECK-NEXT:    movzbl %dil, %eax
; CHECK-NEXT:    andl $63, %eax
; CHECK-NEXT:    movzbl (%rsp,%rax), %eax
; CHECK-NEXT:    movq %rbp, %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq

  %i  = add i8 %index, %index
  %t2 = extractelement <64 x i8> %t1, i8 %i
  ret i8 %t2
}

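; Variable-index extraction of an i1 from a compare mask: the mask is widened
; back into a vector (vpternlog on KNL, vpmovm2* on SKX) and spilled so the
; indexed element can be loaded, except for KNL's <32 x i1> case, which uses
; vpmovmskb and a bit test.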
define zeroext i8 @test_extractelement_varible_v2i1(<2 x i64> %a, <2 x i64> %b, i32 %index) {
; KNL-LABEL: test_extractelement_varible_v2i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    ## kill: def $edi killed $edi def $rdi
; KNL-NEXT:    ## kill: def $xmm1 killed $xmm1 def $zmm1
; KNL-NEXT:    ## kill: def $xmm0 killed $xmm0 def $zmm0
; KNL-NEXT:    vpcmpnleuq %zmm1, %zmm0, %k1
; KNL-NEXT:    vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; KNL-NEXT:    vmovdqa %xmm0, -{{[0-9]+}}(%rsp)
; KNL-NEXT:    andl $1, %edi
; KNL-NEXT:    movzbl -24(%rsp,%rdi,8), %eax
; KNL-NEXT:    andl $1, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_extractelement_varible_v2i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    ## kill: def $edi killed $edi def $rdi
; SKX-NEXT:    vpcmpnleuq %xmm1, %xmm0, %k0
; SKX-NEXT:    vpmovm2q %k0, %xmm0
; SKX-NEXT:    vmovdqa %xmm0, -{{[0-9]+}}(%rsp)
; SKX-NEXT:    andl $1, %edi
; SKX-NEXT:    movzbl -24(%rsp,%rdi,8), %eax
; SKX-NEXT:    andl $1, %eax
; SKX-NEXT:    retq
  %t1 = icmp ugt <2 x i64> %a, %b
  %t2 = extractelement <2 x i1> %t1, i32 %index
  %res = zext i1 %t2 to i8
  ret i8 %res
}

define zeroext i8 @test_extractelement_varible_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %index) {
; KNL-LABEL: test_extractelement_varible_v4i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    ## kill: def $edi killed $edi def $rdi
; KNL-NEXT:    ## kill: def $xmm1 killed $xmm1 def $zmm1
; KNL-NEXT:    ## kill: def $xmm0 killed $xmm0 def $zmm0
; KNL-NEXT:    vpcmpnleud %zmm1, %zmm0, %k1
; KNL-NEXT:    vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; KNL-NEXT:    vmovdqa %xmm0, -{{[0-9]+}}(%rsp)
; KNL-NEXT:    andl $3, %edi
; KNL-NEXT:    movzbl -24(%rsp,%rdi,4), %eax
; KNL-NEXT:    andl $1, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_extractelement_varible_v4i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    ## kill: def $edi killed $edi def $rdi
; SKX-NEXT:    vpcmpnleud %xmm1, %xmm0, %k0
; SKX-NEXT:    vpmovm2d %k0, %xmm0
; SKX-NEXT:    vmovdqa %xmm0, -{{[0-9]+}}(%rsp)
; SKX-NEXT:    andl $3, %edi
; SKX-NEXT:    movzbl -24(%rsp,%rdi,4), %eax
; SKX-NEXT:    andl $1, %eax
; SKX-NEXT:    retq
  %t1 = icmp ugt <4 x i32> %a, %b
  %t2 = extractelement <4 x i1> %t1, i32 %index
  %res = zext i1 %t2 to i8
  ret i8 %res
}

define zeroext i8 @test_extractelement_varible_v8i1(<8 x i32> %a, <8 x i32> %b, i32 %index) {
; KNL-LABEL: test_extractelement_varible_v8i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    ## kill: def $edi killed $edi def $rdi
; KNL-NEXT:    ## kill: def $ymm1 killed $ymm1 def $zmm1
; KNL-NEXT:    ## kill: def $ymm0 killed $ymm0 def $zmm0
; KNL-NEXT:    vpcmpnleud %zmm1, %zmm0, %k1
; KNL-NEXT:    vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; KNL-NEXT:    vpmovdw %zmm0, %ymm0
; KNL-NEXT:    vmovdqa %xmm0, -{{[0-9]+}}(%rsp)
; KNL-NEXT:    andl $7, %edi
; KNL-NEXT:    movzbl -24(%rsp,%rdi,2), %eax
; KNL-NEXT:    andl $1, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_extractelement_varible_v8i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    ## kill: def $edi killed $edi def $rdi
; SKX-NEXT:    vpcmpnleud %ymm1, %ymm0, %k0
; SKX-NEXT:    vpmovm2w %k0, %xmm0
; SKX-NEXT:    vmovdqa %xmm0, -{{[0-9]+}}(%rsp)
; SKX-NEXT:    andl $7, %edi
; SKX-NEXT:    movzbl -24(%rsp,%rdi,2), %eax
; SKX-NEXT:    andl $1, %eax
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %t1 = icmp ugt <8 x i32> %a, %b
  %t2 = extractelement <8 x i1> %t1, i32 %index
  %res = zext i1 %t2 to i8
  ret i8 %res
}

define zeroext i8 @test_extractelement_varible_v16i1(<16 x i32> %a, <16 x i32> %b, i32 %index) {
; KNL-LABEL: test_extractelement_varible_v16i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    ## kill: def $edi killed $edi def $rdi
; KNL-NEXT:    vpcmpnleud %zmm1, %zmm0, %k1
; KNL-NEXT:    vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; KNL-NEXT:    vpmovdb %zmm0, -{{[0-9]+}}(%rsp)
; KNL-NEXT:    andl $15, %edi
; KNL-NEXT:    movzbl -24(%rsp,%rdi), %eax
; KNL-NEXT:    andl $1, %eax
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_extractelement_varible_v16i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    ## kill: def $edi killed $edi def $rdi
; SKX-NEXT:    vpcmpnleud %zmm1, %zmm0, %k0
; SKX-NEXT:    vpmovm2b %k0, %xmm0
; SKX-NEXT:    vmovdqa %xmm0, -{{[0-9]+}}(%rsp)
; SKX-NEXT:    andl $15, %edi
; SKX-NEXT:    movzbl -24(%rsp,%rdi), %eax
; SKX-NEXT:    andl $1, %eax
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %t1 = icmp ugt <16 x i32> %a, %b
  %t2 = extractelement <16 x i1> %t1, i32 %index
  %res = zext i1 %t2 to i8
  ret i8 %res
}

define zeroext i8 @test_extractelement_varible_v32i1(<32 x i8> %a, <32 x i8> %b, i32 %index) {
; KNL-LABEL: test_extractelement_varible_v32i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    vpminub %ymm1, %ymm0, %ymm1
; KNL-NEXT:    vpcmpeqb %ymm1, %ymm0, %ymm0
; KNL-NEXT:    vpmovmskb %ymm0, %ecx
; KNL-NEXT:    xorl %eax, %eax
; KNL-NEXT:    btl %edi, %ecx
; KNL-NEXT:    setae %al
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_extractelement_varible_v32i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    pushq %rbp
; SKX-NEXT:    .cfi_def_cfa_offset 16
; SKX-NEXT:    .cfi_offset %rbp, -16
; SKX-NEXT:    movq %rsp, %rbp
; SKX-NEXT:    .cfi_def_cfa_register %rbp
; SKX-NEXT:    andq $-32, %rsp
; SKX-NEXT:    subq $64, %rsp
; SKX-NEXT:    ## kill: def $edi killed $edi def $rdi
; SKX-NEXT:    vpcmpnleub %ymm1, %ymm0, %k0
; SKX-NEXT:    vpmovm2b %k0, %ymm0
; SKX-NEXT:    vmovdqa %ymm0, (%rsp)
; SKX-NEXT:    andl $31, %edi
; SKX-NEXT:    movzbl (%rsp,%rdi), %eax
; SKX-NEXT:    andl $1, %eax
; SKX-NEXT:    movq %rbp, %rsp
; SKX-NEXT:    popq %rbp
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %t1 = icmp ugt <32 x i8> %a, %b
  %t2 = extractelement <32 x i1> %t1, i32 %index
  %res = zext i1 %t2 to i8
  ret i8 %res
}

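; A shufflevector chain equivalent to placing the 128-bit input in quadrant 2
; of a zero 512-bit vector; this should match a single vinsertf32x4 into a
; zeroed zmm.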
define <8 x i64> @insert_double_zero(<2 x i64> %a) nounwind {
; CHECK-LABEL: insert_double_zero:
; CHECK:       ## %bb.0:
; CHECK-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    vinsertf32x4 $2, %xmm0, %zmm1, %zmm0
; CHECK-NEXT:    retq
  %b = shufflevector <2 x i64> %a, <2 x i64> zeroinitializer, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %d = shufflevector <4 x i64> %b, <4 x i64> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %e = shufflevector <8 x i64> %d, <8 x i64> zeroinitializer, <8 x i32> <i32 8, i32 9, i32 10, i32 11, i32 0, i32 1, i32 2, i32 3>
  ret <8 x i64> %e
}

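; Variable-index insertion of an i1 into a compare mask. KNL spills the mask as
; a byte vector, writes the new bit with setne at the indexed slot, and repacks
; the result with vptestmd; SKX keeps the value in mask registers, selecting
; the target lane with a broadcast compare against the lane index.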
define i32 @test_insertelement_variable_v32i1(<32 x i8> %a, i8 %b, i32 %index) {
; KNL-LABEL: test_insertelement_variable_v32i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    pushq %rbp
; KNL-NEXT:    .cfi_def_cfa_offset 16
; KNL-NEXT:    .cfi_offset %rbp, -16
; KNL-NEXT:    movq %rsp, %rbp
; KNL-NEXT:    .cfi_def_cfa_register %rbp
; KNL-NEXT:    andq $-32, %rsp
; KNL-NEXT:    subq $64, %rsp
; KNL-NEXT:    ## kill: def $esi killed $esi def $rsi
; KNL-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; KNL-NEXT:    vpcmpeqb %ymm1, %ymm0, %ymm0
; KNL-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
; KNL-NEXT:    andl $31, %esi
; KNL-NEXT:    testb %dil, %dil
; KNL-NEXT:    vmovdqa %ymm0, (%rsp)
; KNL-NEXT:    setne (%rsp,%rsi)
; KNL-NEXT:    vpmovsxbd (%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    shll $16, %eax
; KNL-NEXT:    orl %ecx, %eax
; KNL-NEXT:    movq %rbp, %rsp
; KNL-NEXT:    popq %rbp
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_insertelement_variable_v32i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    vptestmb %ymm0, %ymm0, %k0
; SKX-NEXT:    testb %dil, %dil
; SKX-NEXT:    setne %al
; SKX-NEXT:    vpbroadcastb %esi, %ymm0
; SKX-NEXT:    vpcmpeqb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k1
; SKX-NEXT:    vpmovm2b %k0, %ymm0
; SKX-NEXT:    vpbroadcastb %eax, %ymm0 {%k1}
; SKX-NEXT:    vpsllw $7, %ymm0, %ymm0
; SKX-NEXT:    vpmovb2m %ymm0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %t1 = icmp ugt <32 x i8> %a, zeroinitializer
  %t2 = icmp ugt i8 %b, 0
  %t3 = insertelement <32 x i1> %t1, i1 %t2, i32 %index
  %t4 = bitcast <32 x i1> %t3 to i32
  ret i32 %t4
}

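; Same as above for <64 x i1>, returned as i64. KNL has to combine four 16-bit
; kmovw results; SKX uses the full 64-bit k-register path.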
define i64 @test_insertelement_variable_v64i1(<64 x i8> %a, i8 %b, i32 %index) {
; KNL-LABEL: test_insertelement_variable_v64i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    pushq %rbp
; KNL-NEXT:    .cfi_def_cfa_offset 16
; KNL-NEXT:    .cfi_offset %rbp, -16
; KNL-NEXT:    movq %rsp, %rbp
; KNL-NEXT:    .cfi_def_cfa_register %rbp
; KNL-NEXT:    andq $-64, %rsp
; KNL-NEXT:    subq $128, %rsp
; KNL-NEXT:    ## kill: def $esi killed $esi def $rsi
; KNL-NEXT:    vextracti64x4 $1, %zmm0, %ymm1
; KNL-NEXT:    vpxor %xmm2, %xmm2, %xmm2
; KNL-NEXT:    vpcmpeqb %ymm2, %ymm1, %ymm1
; KNL-NEXT:    vpcmpeqb %ymm2, %ymm0, %ymm0
; KNL-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; KNL-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
; KNL-NEXT:    andl $63, %esi
; KNL-NEXT:    testb %dil, %dil
; KNL-NEXT:    vmovdqa64 %zmm0, (%rsp)
; KNL-NEXT:    setne (%rsp,%rsi)
; KNL-NEXT:    vpmovsxbd (%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    shll $16, %ecx
; KNL-NEXT:    orl %eax, %ecx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %edx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    shll $16, %eax
; KNL-NEXT:    orl %edx, %eax
; KNL-NEXT:    shlq $32, %rax
; KNL-NEXT:    orq %rcx, %rax
; KNL-NEXT:    movq %rbp, %rsp
; KNL-NEXT:    popq %rbp
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_insertelement_variable_v64i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    vptestmb %zmm0, %zmm0, %k0
; SKX-NEXT:    testb %dil, %dil
; SKX-NEXT:    setne %al
; SKX-NEXT:    vpbroadcastb %esi, %zmm0
; SKX-NEXT:    vpcmpeqb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k1
; SKX-NEXT:    vpmovm2b %k0, %zmm0
; SKX-NEXT:    vpbroadcastb %eax, %zmm0 {%k1}
; SKX-NEXT:    vpsllw $7, %zmm0, %zmm0
; SKX-NEXT:    vpmovb2m %zmm0, %k0
; SKX-NEXT:    kmovq %k0, %rax
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %t1 = icmp ugt <64 x i8> %a, zeroinitializer
  %t2 = icmp ugt i8 %b, 0
  %t3 = insertelement <64 x i1> %t1, i1 %t2, i32 %index
  %t4 = bitcast <64 x i1> %t3 to i64
  ret i64 %t4
}

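; For <96 x i8> only the first six elements arrive in GPRs; the rest come in on
; the stack and are rebuilt with long vpinsrb sequences before the mask is
; formed. The i96 result comes back in RAX:RDX on both targets.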
define i96 @test_insertelement_variable_v96i1(<96 x i8> %a, i8 %b, i32 %index) {
; KNL-LABEL: test_insertelement_variable_v96i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    pushq %rbp
; KNL-NEXT:    .cfi_def_cfa_offset 16
; KNL-NEXT:    .cfi_offset %rbp, -16
; KNL-NEXT:    movq %rsp, %rbp
; KNL-NEXT:    .cfi_def_cfa_register %rbp
; KNL-NEXT:    andq $-64, %rsp
; KNL-NEXT:    subq $192, %rsp
; KNL-NEXT:    movl 744(%rbp), %eax
; KNL-NEXT:    andl $127, %eax
; KNL-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; KNL-NEXT:    vpinsrb $1, 232(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $2, 240(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $3, 248(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $4, 256(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $5, 264(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $6, 272(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $7, 280(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $8, 288(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $9, 296(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $10, 304(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $11, 312(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $12, 320(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $13, 328(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $14, 336(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vpinsrb $15, 344(%rbp), %xmm0, %xmm0
; KNL-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; KNL-NEXT:    vpinsrb $1, 360(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $2, 368(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $3, 376(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $4, 384(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $5, 392(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $6, 400(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $7, 408(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $8, 416(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $9, 424(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $10, 432(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $11, 440(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $12, 448(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $13, 456(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $14, 464(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vpinsrb $15, 472(%rbp), %xmm1, %xmm1
; KNL-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm1
; KNL-NEXT:    vpxor %xmm0, %xmm0, %xmm0
; KNL-NEXT:    vpcmpeqb %ymm0, %ymm1, %ymm1
; KNL-NEXT:    vmovd %edi, %xmm2
; KNL-NEXT:    vpinsrb $1, %esi, %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $2, %edx, %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $3, %ecx, %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $4, %r8d, %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $5, %r9d, %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $6, 16(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $7, 24(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $8, 32(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $9, 40(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $10, 48(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $11, 56(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $12, 64(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $13, 72(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $14, 80(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $15, 88(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vmovd {{.*#+}} xmm3 = mem[0],zero,zero,zero
; KNL-NEXT:    vpinsrb $1, 104(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $2, 112(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $3, 120(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $4, 128(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $5, 136(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $6, 144(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $7, 152(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $8, 160(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $9, 168(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $10, 176(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $11, 184(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $12, 192(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $13, 200(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $14, 208(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $15, 216(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vinserti128 $1, %xmm3, %ymm2, %ymm2
; KNL-NEXT:    vpcmpeqb %ymm0, %ymm2, %ymm2
; KNL-NEXT:    vinserti64x4 $1, %ymm1, %zmm2, %zmm1
; KNL-NEXT:    vpternlogq $15, %zmm1, %zmm1, %zmm1
; KNL-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; KNL-NEXT:    vpinsrb $1, 488(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $2, 496(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $3, 504(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $4, 512(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $5, 520(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $6, 528(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $7, 536(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $8, 544(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $9, 552(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $10, 560(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $11, 568(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $12, 576(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $13, 584(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $14, 592(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vpinsrb $15, 600(%rbp), %xmm2, %xmm2
; KNL-NEXT:    vmovd {{.*#+}} xmm3 = mem[0],zero,zero,zero
; KNL-NEXT:    vpinsrb $1, 616(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $2, 624(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $3, 632(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $4, 640(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $5, 648(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $6, 656(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $7, 664(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $8, 672(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $9, 680(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $10, 688(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $11, 696(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $12, 704(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $13, 712(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $14, 720(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vpinsrb $15, 728(%rbp), %xmm3, %xmm3
; KNL-NEXT:    vinserti128 $1, %xmm3, %ymm2, %ymm2
; KNL-NEXT:    vpcmpeqb %ymm0, %ymm2, %ymm0
; KNL-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
; KNL-NEXT:    cmpb $0, 736(%rbp)
; KNL-NEXT:    vmovdqa %ymm0, {{[0-9]+}}(%rsp)
; KNL-NEXT:    vmovdqa64 %zmm1, (%rsp)
; KNL-NEXT:    setne (%rsp,%rax)
; KNL-NEXT:    vpmovsxbd (%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    shll $16, %ecx
; KNL-NEXT:    orl %eax, %ecx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %edx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    shll $16, %eax
; KNL-NEXT:    orl %edx, %eax
; KNL-NEXT:    shlq $32, %rax
; KNL-NEXT:    orq %rcx, %rax
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %esi
; KNL-NEXT:    shll $16, %esi
; KNL-NEXT:    orl %ecx, %esi
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %edx
; KNL-NEXT:    shll $16, %edx
; KNL-NEXT:    orl %ecx, %edx
; KNL-NEXT:    shlq $32, %rdx
; KNL-NEXT:    orq %rsi, %rdx
; KNL-NEXT:    movq %rbp, %rsp
; KNL-NEXT:    popq %rbp
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_insertelement_variable_v96i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    pushq %rbp
; SKX-NEXT:    .cfi_def_cfa_offset 16
; SKX-NEXT:    .cfi_offset %rbp, -16
; SKX-NEXT:    movq %rsp, %rbp
; SKX-NEXT:    .cfi_def_cfa_register %rbp
; SKX-NEXT:    andq $-64, %rsp
; SKX-NEXT:    subq $192, %rsp
; SKX-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SKX-NEXT:    vpinsrb $1, 232(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $2, 240(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $3, 248(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $4, 256(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $5, 264(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $6, 272(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $7, 280(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $8, 288(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $9, 296(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $10, 304(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $11, 312(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $12, 320(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $13, 328(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $14, 336(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vpinsrb $15, 344(%rbp), %xmm0, %xmm0
; SKX-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; SKX-NEXT:    vpinsrb $1, 360(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $2, 368(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $3, 376(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $4, 384(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $5, 392(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $6, 400(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $7, 408(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $8, 416(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $9, 424(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $10, 432(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $11, 440(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $12, 448(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $13, 456(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $14, 464(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $15, 472(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; SKX-NEXT:    vmovd %edi, %xmm1
; SKX-NEXT:    vpinsrb $1, %esi, %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $2, %edx, %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $3, %ecx, %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $4, %r8d, %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $5, %r9d, %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $6, 16(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $7, 24(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $8, 32(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $9, 40(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $10, 48(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $11, 56(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $12, 64(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $13, 72(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $14, 80(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $15, 88(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SKX-NEXT:    vpinsrb $1, 104(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $2, 112(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $3, 120(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $4, 128(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $5, 136(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $6, 144(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $7, 152(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $8, 160(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $9, 168(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $10, 176(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $11, 184(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $12, 192(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $13, 200(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $14, 208(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $15, 216(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vinserti128 $1, %xmm2, %ymm1, %ymm1
; SKX-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; SKX-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; SKX-NEXT:    vpinsrb $1, 488(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $2, 496(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $3, 504(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $4, 512(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $5, 520(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $6, 528(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $7, 536(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $8, 544(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $9, 552(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $10, 560(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $11, 568(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $12, 576(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $13, 584(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $14, 592(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vpinsrb $15, 600(%rbp), %xmm1, %xmm1
; SKX-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SKX-NEXT:    vpinsrb $1, 616(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $2, 624(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $3, 632(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $4, 640(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $5, 648(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $6, 656(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $7, 664(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $8, 672(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $9, 680(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $10, 688(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $11, 696(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $12, 704(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $13, 712(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $14, 720(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vpinsrb $15, 728(%rbp), %xmm2, %xmm2
; SKX-NEXT:    vinserti128 $1, %xmm2, %ymm1, %ymm1
; SKX-NEXT:    movl 744(%rbp), %eax
; SKX-NEXT:    andl $127, %eax
; SKX-NEXT:    vptestmb %zmm0, %zmm0, %k0
; SKX-NEXT:    vptestmb %zmm1, %zmm1, %k1
; SKX-NEXT:    cmpb $0, 736(%rbp)
; SKX-NEXT:    vpmovm2b %k1, %zmm0
; SKX-NEXT:    vmovdqa64 %zmm0, {{[0-9]+}}(%rsp)
; SKX-NEXT:    vpmovm2b %k0, %zmm0
; SKX-NEXT:    vmovdqa64 %zmm0, (%rsp)
; SKX-NEXT:    setne (%rsp,%rax)
; SKX-NEXT:    vpsllw $7, {{[0-9]+}}(%rsp), %zmm0
; SKX-NEXT:    vpmovb2m %zmm0, %k0
; SKX-NEXT:    vpsllw $7, (%rsp), %zmm0
; SKX-NEXT:    vpmovb2m %zmm0, %k1
; SKX-NEXT:    kmovq %k1, %rax
; SKX-NEXT:    kmovq %k0, %rdx
; SKX-NEXT:    movq %rbp, %rsp
; SKX-NEXT:    popq %rbp
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %t1 = icmp ugt <96 x i8> %a, zeroinitializer
  %t2 = icmp ugt i8 %b, 0
  %t3 = insertelement <96 x i1> %t1, i1 %t2, i32 %index
  %t4 = bitcast <96 x i1> %t3 to i96
  ret i96 %t4
}

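; With two full zmm arguments the <128 x i1> case is simpler: SKX tests both
; halves into k-registers, spills them as bytes, patches the selected byte,
; and reloads through vpsllw/vpmovb2m.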
define i128 @test_insertelement_variable_v128i1(<128 x i8> %a, i8 %b, i32 %index) {
; KNL-LABEL: test_insertelement_variable_v128i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    pushq %rbp
; KNL-NEXT:    .cfi_def_cfa_offset 16
; KNL-NEXT:    .cfi_offset %rbp, -16
; KNL-NEXT:    movq %rsp, %rbp
; KNL-NEXT:    .cfi_def_cfa_register %rbp
; KNL-NEXT:    andq $-64, %rsp
; KNL-NEXT:    subq $192, %rsp
; KNL-NEXT:    ## kill: def $esi killed $esi def $rsi
; KNL-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; KNL-NEXT:    vpxor %xmm3, %xmm3, %xmm3
; KNL-NEXT:    vpcmpeqb %ymm3, %ymm2, %ymm2
; KNL-NEXT:    vpcmpeqb %ymm3, %ymm0, %ymm0
; KNL-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; KNL-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
; KNL-NEXT:    vextracti64x4 $1, %zmm1, %ymm2
; KNL-NEXT:    vpcmpeqb %ymm3, %ymm2, %ymm2
; KNL-NEXT:    vpcmpeqb %ymm3, %ymm1, %ymm1
; KNL-NEXT:    vinserti64x4 $1, %ymm2, %zmm1, %zmm1
; KNL-NEXT:    vpternlogq $15, %zmm1, %zmm1, %zmm1
; KNL-NEXT:    andl $127, %esi
; KNL-NEXT:    testb %dil, %dil
; KNL-NEXT:    vmovdqa64 %zmm1, {{[0-9]+}}(%rsp)
; KNL-NEXT:    vmovdqa64 %zmm0, (%rsp)
; KNL-NEXT:    setne (%rsp,%rsi)
; KNL-NEXT:    vpmovsxbd (%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    shll $16, %ecx
; KNL-NEXT:    orl %eax, %ecx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %edx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    shll $16, %eax
; KNL-NEXT:    orl %edx, %eax
; KNL-NEXT:    shlq $32, %rax
; KNL-NEXT:    orq %rcx, %rax
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %esi
; KNL-NEXT:    shll $16, %esi
; KNL-NEXT:    orl %ecx, %esi
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %ecx
; KNL-NEXT:    vpmovsxbd {{[0-9]+}}(%rsp), %zmm0
; KNL-NEXT:    vpslld $31, %zmm0, %zmm0
; KNL-NEXT:    vptestmd %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %edx
; KNL-NEXT:    shll $16, %edx
; KNL-NEXT:    orl %ecx, %edx
; KNL-NEXT:    shlq $32, %rdx
; KNL-NEXT:    orq %rsi, %rdx
; KNL-NEXT:    movq %rbp, %rsp
; KNL-NEXT:    popq %rbp
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_insertelement_variable_v128i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    pushq %rbp
; SKX-NEXT:    .cfi_def_cfa_offset 16
; SKX-NEXT:    .cfi_offset %rbp, -16
; SKX-NEXT:    movq %rsp, %rbp
; SKX-NEXT:    .cfi_def_cfa_register %rbp
; SKX-NEXT:    andq $-64, %rsp
; SKX-NEXT:    subq $192, %rsp
; SKX-NEXT:    ## kill: def $esi killed $esi def $rsi
; SKX-NEXT:    vptestmb %zmm0, %zmm0, %k0
; SKX-NEXT:    vptestmb %zmm1, %zmm1, %k1
; SKX-NEXT:    andl $127, %esi
; SKX-NEXT:    testb %dil, %dil
; SKX-NEXT:    vpmovm2b %k1, %zmm0
; SKX-NEXT:    vmovdqa64 %zmm0, {{[0-9]+}}(%rsp)
; SKX-NEXT:    vpmovm2b %k0, %zmm0
; SKX-NEXT:    vmovdqa64 %zmm0, (%rsp)
; SKX-NEXT:    setne (%rsp,%rsi)
; SKX-NEXT:    vpsllw $7, {{[0-9]+}}(%rsp), %zmm0
; SKX-NEXT:    vpmovb2m %zmm0, %k0
; SKX-NEXT:    vpsllw $7, (%rsp), %zmm0
; SKX-NEXT:    vpmovb2m %zmm0, %k1
; SKX-NEXT:    kmovq %k1, %rax
; SKX-NEXT:    kmovq %k0, %rdx
; SKX-NEXT:    movq %rbp, %rsp
; SKX-NEXT:    popq %rbp
; SKX-NEXT:    vzeroupper
; SKX-NEXT:    retq
  %t1 = icmp ugt <128 x i8> %a, zeroinitializer
  %t2 = icmp ugt i8 %b, 0
  %t3 = insertelement <128 x i1> %t1, i1 %t2, i32 %index
  %t4 = bitcast <128 x i1> %t3 to i128
  ret i128 %t4
}

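; The <2 x half> compares are scalarized: each element goes through vcvtph2ps
; and vucomiss, the per-element i1 results are merged in k-registers, and the
; combined mask zeroes out the unselected lanes of the second load.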
define void @test_concat_v2i1(ptr %arg, ptr %arg1, ptr %arg2) {
; KNL-LABEL: test_concat_v2i1:
; KNL:       ## %bb.0:
; KNL-NEXT:    vmovq {{.*#+}} xmm0 = mem[0],zero
; KNL-NEXT:    vpextrw $0, %xmm0, %eax
; KNL-NEXT:    movzwl %ax, %eax
; KNL-NEXT:    vmovd %eax, %xmm1
; KNL-NEXT:    vcvtph2ps %xmm1, %xmm1
; KNL-NEXT:    vmovss {{.*#+}} xmm2 = mem[0],zero,zero,zero
; KNL-NEXT:    vucomiss %xmm2, %xmm1
; KNL-NEXT:    setb %al
; KNL-NEXT:    andl $1, %eax
; KNL-NEXT:    kmovw %eax, %k0
; KNL-NEXT:    vpsrld $16, %xmm0, %xmm0
; KNL-NEXT:    vpextrw $0, %xmm0, %eax
; KNL-NEXT:    movzwl %ax, %eax
; KNL-NEXT:    vmovd %eax, %xmm0
; KNL-NEXT:    vcvtph2ps %xmm0, %xmm0
; KNL-NEXT:    vucomiss %xmm2, %xmm0
; KNL-NEXT:    setb %al
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    kshiftlw $1, %k1, %k1
; KNL-NEXT:    korw %k1, %k0, %k0
; KNL-NEXT:    vxorps %xmm2, %xmm2, %xmm2
; KNL-NEXT:    vucomiss %xmm2, %xmm1
; KNL-NEXT:    seta %al
; KNL-NEXT:    andl $1, %eax
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    vucomiss %xmm2, %xmm0
; KNL-NEXT:    seta %al
; KNL-NEXT:    kmovw %eax, %k2
; KNL-NEXT:    kshiftlw $1, %k2, %k2
; KNL-NEXT:    korw %k2, %k1, %k1
; KNL-NEXT:    kandw %k1, %k0, %k1
; KNL-NEXT:    vmovq {{.*#+}} xmm0 = mem[0],zero
; KNL-NEXT:    vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
; KNL-NEXT:    vpmovdw %zmm1, %ymm1
; KNL-NEXT:    vpand %xmm0, %xmm1, %xmm0
; KNL-NEXT:    vmovd %xmm0, (%rdx)
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_concat_v2i1:
; SKX:       ## %bb.0:
; SKX-NEXT:    vmovq {{.*#+}} xmm0 = mem[0],zero
; SKX-NEXT:    vpsrld $16, %xmm0, %xmm1
; SKX-NEXT:    vpextrw $0, %xmm1, %eax
; SKX-NEXT:    movzwl %ax, %eax
; SKX-NEXT:    vmovd %eax, %xmm1
; SKX-NEXT:    vcvtph2ps %xmm1, %xmm1
; SKX-NEXT:    vmovss {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SKX-NEXT:    vucomiss %xmm2, %xmm1
; SKX-NEXT:    setb %al
; SKX-NEXT:    kmovd %eax, %k0
; SKX-NEXT:    kshiftlb $1, %k0, %k0
; SKX-NEXT:    vpextrw $0, %xmm0, %eax
; SKX-NEXT:    movzwl %ax, %eax
; SKX-NEXT:    vmovd %eax, %xmm0
; SKX-NEXT:    vcvtph2ps %xmm0, %xmm0
; SKX-NEXT:    vucomiss %xmm2, %xmm0
; SKX-NEXT:    setb %al
; SKX-NEXT:    kmovd %eax, %k1
; SKX-NEXT:    kshiftlb $7, %k1, %k1
; SKX-NEXT:    kshiftrb $7, %k1, %k1
; SKX-NEXT:    korw %k0, %k1, %k0
; SKX-NEXT:    vxorps %xmm2, %xmm2, %xmm2
; SKX-NEXT:    vucomiss %xmm2, %xmm1
; SKX-NEXT:    seta %al
; SKX-NEXT:    kmovd %eax, %k1
; SKX-NEXT:    kshiftlb $1, %k1, %k1
; SKX-NEXT:    vucomiss %xmm2, %xmm0
; SKX-NEXT:    seta %al
; SKX-NEXT:    kmovd %eax, %k2
; SKX-NEXT:    kshiftlb $7, %k2, %k2
; SKX-NEXT:    kshiftrb $7, %k2, %k2
; SKX-NEXT:    korw %k1, %k2, %k1
; SKX-NEXT:    kandw %k1, %k0, %k1
; SKX-NEXT:    vmovq {{.*#+}} xmm0 = mem[0],zero
; SKX-NEXT:    vmovdqu16 %xmm0, %xmm0 {%k1} {z}
; SKX-NEXT:    vmovd %xmm0, (%rdx)
; SKX-NEXT:    retq
  %tmp = load <2 x half>, ptr %arg, align 8
  %tmp3 = fcmp fast olt <2 x half> %tmp, <half 0xH4600, half 0xH4600>
  %tmp4 = fcmp fast ogt <2 x half> %tmp, zeroinitializer
  %tmp5 = and <2 x i1> %tmp3, %tmp4
  %tmp6 = load <2 x half>, ptr %arg1, align 8
  %tmp7 = select <2 x i1> %tmp5, <2 x half> %tmp6, <2 x half> zeroinitializer
  store <2 x half> %tmp7, ptr %arg2, align 8
  ret void
}