; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=avx512fp16 -O3 | FileCheck %s --check-prefixes=CHECK,X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx512fp16 -O3 | FileCheck %s --check-prefixes=CHECK,X64

declare <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i1(<32 x i1>, metadata, metadata)
declare <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i1(<32 x i1>, metadata, metadata)
declare <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i8(<32 x i8>, metadata, metadata)
declare <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i8(<32 x i8>, metadata, metadata)
declare <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i16(<32 x i16>, metadata, metadata)
declare <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i16(<32 x i16>, metadata, metadata)
declare <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i32(<16 x i32>, metadata, metadata)
declare <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i32(<16 x i32>, metadata, metadata)
declare <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i64(<8 x i64>, metadata, metadata)
declare <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i64(<8 x i64>, metadata, metadata)

define <32 x half> @sitofp_v32i1_v32f16(<32 x i1> %x) #0 {
; CHECK-LABEL: sitofp_v32i1_v32f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpmovzxbw {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero,ymm0[16],zero,ymm0[17],zero,ymm0[18],zero,ymm0[19],zero,ymm0[20],zero,ymm0[21],zero,ymm0[22],zero,ymm0[23],zero,ymm0[24],zero,ymm0[25],zero,ymm0[26],zero,ymm0[27],zero,ymm0[28],zero,ymm0[29],zero,ymm0[30],zero,ymm0[31],zero
; CHECK-NEXT:    vpsllw $15, %zmm0, %zmm0
; CHECK-NEXT:    vpsraw $15, %zmm0, %zmm0
; CHECK-NEXT:    vcvtw2ph %zmm0, %zmm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i1(<32 x i1> %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret <32 x half> %result
}

define <32 x half> @uitofp_v32i1_v32f16(<32 x i1> %x) #0 {
; X86-LABEL: uitofp_v32i1_v32f16:
; X86:       # %bb.0:
; X86-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
; X86-NEXT:    vpmovzxbw {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero,ymm0[16],zero,ymm0[17],zero,ymm0[18],zero,ymm0[19],zero,ymm0[20],zero,ymm0[21],zero,ymm0[22],zero,ymm0[23],zero,ymm0[24],zero,ymm0[25],zero,ymm0[26],zero,ymm0[27],zero,ymm0[28],zero,ymm0[29],zero,ymm0[30],zero,ymm0[31],zero
; X86-NEXT:    vcvtuw2ph %zmm0, %zmm0
; X86-NEXT:    retl
;
; X64-LABEL: uitofp_v32i1_v32f16:
; X64:       # %bb.0:
; X64-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-NEXT:    vpmovzxbw {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero,ymm0[16],zero,ymm0[17],zero,ymm0[18],zero,ymm0[19],zero,ymm0[20],zero,ymm0[21],zero,ymm0[22],zero,ymm0[23],zero,ymm0[24],zero,ymm0[25],zero,ymm0[26],zero,ymm0[27],zero,ymm0[28],zero,ymm0[29],zero,ymm0[30],zero,ymm0[31],zero
; X64-NEXT:    vcvtuw2ph %zmm0, %zmm0
; X64-NEXT:    retq
  %result = call <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i1(<32 x i1> %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret <32 x half> %result
}

define <32 x half> @sitofp_v32i8_v32f16(<32 x i8> %x) #0 {
; CHECK-LABEL: sitofp_v32i8_v32f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpmovsxbw %ymm0, %zmm0
; CHECK-NEXT:    vcvtw2ph %zmm0, %zmm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i8(<32 x i8> %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret <32 x half> %result
}

define <32 x half> @uitofp_v32i8_v32f16(<32 x i8> %x) #0 {
; CHECK-LABEL: uitofp_v32i8_v32f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpmovzxbw {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero,ymm0[16],zero,ymm0[17],zero,ymm0[18],zero,ymm0[19],zero,ymm0[20],zero,ymm0[21],zero,ymm0[22],zero,ymm0[23],zero,ymm0[24],zero,ymm0[25],zero,ymm0[26],zero,ymm0[27],zero,ymm0[28],zero,ymm0[29],zero,ymm0[30],zero,ymm0[31],zero
; CHECK-NEXT:    vcvtuw2ph %zmm0, %zmm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i8(<32 x i8> %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret <32 x half> %result
}

define <32 x half> @sitofp_v32i16_v32f16(<32 x i16> %x) #0 {
; CHECK-LABEL: sitofp_v32i16_v32f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtw2ph %zmm0, %zmm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i16(<32 x i16> %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret <32 x half> %result
}

define <32 x half> @uitofp_v32i16_v32f16(<32 x i16> %x) #0 {
; CHECK-LABEL: uitofp_v32i16_v32f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtuw2ph %zmm0, %zmm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i16(<32 x i16> %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret <32 x half> %result
}

define <16 x half> @sitofp_v16i32_v16f16(<16 x i32> %x) #0 {
; CHECK-LABEL: sitofp_v16i32_v16f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtdq2ph %zmm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i32(<16 x i32> %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret <16 x half> %result
}

define <16 x half> @uitofp_v16i32_v16f16(<16 x i32> %x) #0 {
; CHECK-LABEL: uitofp_v16i32_v16f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtudq2ph %zmm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i32(<16 x i32> %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret <16 x half> %result
}

define <8 x half> @sitofp_v8i64_v8f16(<8 x i64> %x) #0 {
; CHECK-LABEL: sitofp_v8i64_v8f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtqq2ph %zmm0, %xmm0
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i64(<8 x i64> %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret <8 x half> %result
}

define <8 x half> @uitofp_v8i64_v8f16(<8 x i64> %x) #0 {
; CHECK-LABEL: uitofp_v8i64_v8f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtuqq2ph %zmm0, %xmm0
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i64(<8 x i64> %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret <8 x half> %result
}

attributes #0 = { strictfp }