; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefix=RV32I
; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefix=RV64I
; RUN: llc -mtriple=riscv32 -mattr=+zbb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV32ZB,RV32ZBB
; RUN: llc -mtriple=riscv64 -mattr=+zbb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV64ZB,RV64ZBB
; RUN: llc -mtriple=riscv32 -mattr=+zbkb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV32ZB,RV32ZBKB
; RUN: llc -mtriple=riscv64 -mattr=+zbkb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV64ZB,RV64ZBKB
; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbp -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV32ZBP
; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbp -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV64ZBP

declare i16 @llvm.bswap.i16(i16)
declare i32 @llvm.bswap.i32(i32)
declare i64 @llvm.bswap.i64(i64)
declare i8 @llvm.bitreverse.i8(i8)
declare i16 @llvm.bitreverse.i16(i16)
declare i32 @llvm.bitreverse.i32(i32)
declare i64 @llvm.bitreverse.i64(i64)

define i16 @test_bswap_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bswap_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 8
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 24
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 8
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 56
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZB-LABEL: test_bswap_i16:
; RV32ZB:       # %bb.0:
; RV32ZB-NEXT:    rev8 a0, a0
; RV32ZB-NEXT:    srli a0, a0, 16
; RV32ZB-NEXT:    ret
;
; RV64ZB-LABEL: test_bswap_i16:
; RV64ZB:       # %bb.0:
; RV64ZB-NEXT:    rev8 a0, a0
; RV64ZB-NEXT:    srli a0, a0, 48
; RV64ZB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_i16:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev8.h a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_i16:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev8.h a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i16 @llvm.bswap.i16(i16 %a)
  ret i16 %tmp
}

define i32 @test_bswap_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bswap_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    lui a2, 16
; RV32I-NEXT:    addi a2, a2, -256
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    srli a2, a0, 24
; RV32I-NEXT:    or a1, a1, a2
; RV32I-NEXT:    slli a2, a0, 8
; RV32I-NEXT:    lui a3, 4080
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a0, a0, a2
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srliw a1, a0, 8
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    addiw a2, a2, -256
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    srliw a2, a0, 24
; RV64I-NEXT:    or a1, a1, a2
; RV64I-NEXT:    slli a2, a0, 8
; RV64I-NEXT:    lui a3, 4080
; RV64I-NEXT:    and a2, a2, a3
; RV64I-NEXT:    slliw a0, a0, 24
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZB-LABEL: test_bswap_i32:
; RV32ZB:       # %bb.0:
; RV32ZB-NEXT:    rev8 a0, a0
; RV32ZB-NEXT:    ret
;
; RV64ZB-LABEL: test_bswap_i32:
; RV64ZB:       # %bb.0:
; RV64ZB-NEXT:    rev8 a0, a0
; RV64ZB-NEXT:    srli a0, a0, 32
; RV64ZB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_i32:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev8 a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_i32:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev8.w a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i32 @llvm.bswap.i32(i32 %a)
  ret i32 %tmp
}
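; i64 bswap: with Zbb/Zbkb/Zbp, RV64 needs just one rev8, while RV32 reverses each
; 32-bit half with rev8 and then swaps the halves. The base ISA assembles the
; swapped value from shift/and/or pieces.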
define i64 @test_bswap_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bswap_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a1, 8
; RV32I-NEXT:    lui a3, 16
; RV32I-NEXT:    addi a3, a3, -256
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    srli a4, a1, 24
; RV32I-NEXT:    or a2, a2, a4
; RV32I-NEXT:    slli a4, a1, 8
; RV32I-NEXT:    lui a5, 4080
; RV32I-NEXT:    and a4, a4, a5
; RV32I-NEXT:    slli a1, a1, 24
; RV32I-NEXT:    or a1, a1, a4
; RV32I-NEXT:    or a2, a1, a2
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    srli a3, a0, 24
; RV32I-NEXT:    or a1, a1, a3
; RV32I-NEXT:    slli a3, a0, 8
; RV32I-NEXT:    and a3, a3, a5
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a0, a0, a3
; RV32I-NEXT:    or a1, a0, a1
; RV32I-NEXT:    mv a0, a2
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 24
; RV64I-NEXT:    lui a2, 4080
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    srli a2, a0, 8
; RV64I-NEXT:    li a3, 255
; RV64I-NEXT:    slli a4, a3, 24
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    or a1, a2, a1
; RV64I-NEXT:    srli a2, a0, 40
; RV64I-NEXT:    lui a4, 16
; RV64I-NEXT:    addiw a4, a4, -256
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    srli a4, a0, 56
; RV64I-NEXT:    or a2, a2, a4
; RV64I-NEXT:    or a1, a1, a2
; RV64I-NEXT:    slli a2, a0, 24
; RV64I-NEXT:    slli a4, a3, 40
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    srliw a4, a0, 24
; RV64I-NEXT:    slli a4, a4, 32
; RV64I-NEXT:    or a2, a2, a4
; RV64I-NEXT:    slli a4, a0, 40
; RV64I-NEXT:    slli a3, a3, 48
; RV64I-NEXT:    and a3, a4, a3
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    or a0, a0, a3
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZB-LABEL: test_bswap_i64:
; RV32ZB:       # %bb.0:
; RV32ZB-NEXT:    rev8 a2, a1
; RV32ZB-NEXT:    rev8 a1, a0
; RV32ZB-NEXT:    mv a0, a2
; RV32ZB-NEXT:    ret
;
; RV64ZB-LABEL: test_bswap_i64:
; RV64ZB:       # %bb.0:
; RV64ZB-NEXT:    rev8 a0, a0
; RV64ZB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_i64:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev8 a2, a1
; RV32ZBP-NEXT:    rev8 a1, a0
; RV32ZBP-NEXT:    mv a0, a2
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_i64:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev8 a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i64 @llvm.bswap.i64(i64 %a)
  ret i64 %tmp
}

define i8 @test_bitreverse_i8(i8 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a1, a0, 15
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    srli a0, a0, 28
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    andi a1, a0, 51
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    srli a0, a0, 2
; RV32I-NEXT:    andi a0, a0, 51
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    andi a1, a0, 85
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    srli a0, a0, 1
; RV32I-NEXT:    andi a0, a0, 85
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    andi a1, a0, 15
; RV64I-NEXT:    slli a1, a1, 4
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    srli a0, a0, 60
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    andi a1, a0, 51
; RV64I-NEXT:    slli a1, a1, 2
; RV64I-NEXT:    srli a0, a0, 2
; RV64I-NEXT:    andi a0, a0, 51
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    andi a1, a0, 85
; RV64I-NEXT:    slli a1, a1, 1
; RV64I-NEXT:    srli a0, a0, 1
; RV64I-NEXT:    andi a0, a0, 85
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    andi a1, a0, 15
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    slli a0, a0, 24
; RV32ZBB-NEXT:    srli a0, a0, 28
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    andi a1, a0, 51
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    srli a0, a0, 2
; RV32ZBB-NEXT:    andi a0, a0, 51
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    andi a1, a0, 85
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    srli a0, a0, 1
; RV32ZBB-NEXT:    andi a0, a0, 85
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    andi a1, a0, 15
; RV64ZBB-NEXT:    slli a1, a1, 4
; RV64ZBB-NEXT:    slli a0, a0, 56
; RV64ZBB-NEXT:    srli a0, a0, 60
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    andi a1, a0, 51
; RV64ZBB-NEXT:    slli a1, a1, 2
; RV64ZBB-NEXT:    srli a0, a0, 2
; RV64ZBB-NEXT:    andi a0, a0, 51
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    andi a1, a0, 85
; RV64ZBB-NEXT:    slli a1, a1, 1
; RV64ZBB-NEXT:    srli a0, a0, 1
; RV64ZBB-NEXT:    andi a0, a0, 85
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i8:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 24
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i8:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 56
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_i8:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_i8:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i8 @llvm.bitreverse.i8(i8 %a)
  ret i8 %tmp
}
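; For bitreverse, the base ISA byte-swaps first and then exchanges nibbles, bit
; pairs, and single bits under constant masks. Zbb only provides rev8, so the
; masking stages remain; Zbkb pairs rev8 with brev8; Zbp folds the whole
; reversal into one rev.* instruction.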
define i16 @test_bitreverse_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 8
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 24
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 1
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 3
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 5
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 8
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 56
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 1
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 3
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 5
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    rev8 a0, a0
; RV32ZBB-NEXT:    srli a1, a0, 12
; RV32ZBB-NEXT:    lui a2, 15
; RV32ZBB-NEXT:    addi a2, a2, 240
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    srli a0, a0, 20
; RV32ZBB-NEXT:    andi a0, a0, -241
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 3
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 5
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    rev8 a0, a0
; RV64ZBB-NEXT:    srli a1, a0, 44
; RV64ZBB-NEXT:    lui a2, 15
; RV64ZBB-NEXT:    addiw a2, a2, 240
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    srli a0, a0, 52
; RV64ZBB-NEXT:    andi a0, a0, -241
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 3
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 5
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 16
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 48
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_i16:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.h a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_i16:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.h a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i16 @llvm.bitreverse.i16(i16 %a)
  ret i16 %tmp
}
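; The i32 version uses the same staged masking, with each 32-bit mask
; materialized via lui+addi(w); the RV64 code keeps intermediates sign-extended
; (slliw, and sext.w in the Zbb path).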
define i32 @test_bitreverse_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    lui a2, 16
; RV32I-NEXT:    addi a2, a2, -256
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    srli a2, a0, 24
; RV32I-NEXT:    or a1, a1, a2
; RV32I-NEXT:    slli a2, a0, 8
; RV32I-NEXT:    lui a3, 4080
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a0, a0, a2
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 61681
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 209715
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 349525
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srliw a1, a0, 8
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    addiw a2, a2, -256
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    srliw a2, a0, 24
; RV64I-NEXT:    or a1, a1, a2
; RV64I-NEXT:    slli a2, a0, 8
; RV64I-NEXT:    lui a3, 4080
; RV64I-NEXT:    and a2, a2, a3
; RV64I-NEXT:    slliw a0, a0, 24
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 61681
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 209715
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 349525
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    rev8 a0, a0
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 61681
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 209715
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 349525
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    rev8 a0, a0
; RV64ZBB-NEXT:    srli a1, a0, 36
; RV64ZBB-NEXT:    lui a2, 61681
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    srli a0, a0, 28
; RV64ZBB-NEXT:    lui a2, 986895
; RV64ZBB-NEXT:    addiw a2, a2, 240
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    sext.w a0, a0
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 349525
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 32
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_i32:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_i32:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.w a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i32 @llvm.bitreverse.i32(i32 %a)
  ret i32 %tmp
}
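; For i64 bitreverse on RV64, the masks no longer fit in an lui+addiw pair and
; are loaded from the constant pool (.LCPI6_*); RV32 keeps the 32-bit masks in
; registers and reuses them for both halves.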
define i64 @test_bitreverse_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a1, 8
; RV32I-NEXT:    lui a3, 16
; RV32I-NEXT:    addi a3, a3, -256
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    srli a4, a1, 24
; RV32I-NEXT:    or a2, a2, a4
; RV32I-NEXT:    slli a4, a1, 8
; RV32I-NEXT:    lui a5, 4080
; RV32I-NEXT:    and a4, a4, a5
; RV32I-NEXT:    slli a1, a1, 24
; RV32I-NEXT:    or a1, a1, a4
; RV32I-NEXT:    or a1, a1, a2
; RV32I-NEXT:    srli a2, a1, 4
; RV32I-NEXT:    lui a4, 61681
; RV32I-NEXT:    addi a4, a4, -241
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    and a1, a1, a4
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 2
; RV32I-NEXT:    lui a6, 209715
; RV32I-NEXT:    addi a6, a6, 819
; RV32I-NEXT:    and a2, a2, a6
; RV32I-NEXT:    and a1, a1, a6
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 1
; RV32I-NEXT:    lui a7, 349525
; RV32I-NEXT:    addi a7, a7, 1365
; RV32I-NEXT:    and a2, a2, a7
; RV32I-NEXT:    and a1, a1, a7
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    or a2, a2, a1
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    srli a3, a0, 24
; RV32I-NEXT:    or a1, a1, a3
; RV32I-NEXT:    slli a3, a0, 8
; RV32I-NEXT:    and a3, a3, a5
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a0, a0, a3
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    and a1, a1, a4
; RV32I-NEXT:    and a0, a0, a4
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    and a1, a1, a6
; RV32I-NEXT:    and a0, a0, a6
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    and a1, a1, a7
; RV32I-NEXT:    and a0, a0, a7
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a1, a1, a0
; RV32I-NEXT:    mv a0, a2
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 24
; RV64I-NEXT:    lui a2, 4080
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    srli a2, a0, 8
; RV64I-NEXT:    li a3, 255
; RV64I-NEXT:    slli a4, a3, 24
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    or a1, a2, a1
; RV64I-NEXT:    srli a2, a0, 40
; RV64I-NEXT:    lui a4, 16
; RV64I-NEXT:    addiw a4, a4, -256
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    srli a4, a0, 56
; RV64I-NEXT:    or a2, a2, a4
; RV64I-NEXT:    or a1, a1, a2
; RV64I-NEXT:    slli a2, a0, 24
; RV64I-NEXT:    slli a4, a3, 40
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    srliw a4, a0, 24
; RV64I-NEXT:    slli a4, a4, 32
; RV64I-NEXT:    or a2, a2, a4
; RV64I-NEXT:    slli a4, a0, 40
; RV64I-NEXT:    slli a3, a3, 48
; RV64I-NEXT:    and a3, a4, a3
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    or a0, a0, a3
; RV64I-NEXT:    lui a3, %hi(.LCPI6_0)
; RV64I-NEXT:    ld a3, %lo(.LCPI6_0)(a3)
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    and a1, a1, a3
; RV64I-NEXT:    and a0, a0, a3
; RV64I-NEXT:    lui a2, %hi(.LCPI6_1)
; RV64I-NEXT:    ld a2, %lo(.LCPI6_1)(a2)
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    lui a2, %hi(.LCPI6_2)
; RV64I-NEXT:    ld a2, %lo(.LCPI6_2)(a2)
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    rev8 a1, a1
; RV32ZBB-NEXT:    srli a2, a1, 4
; RV32ZBB-NEXT:    lui a3, 61681
; RV32ZBB-NEXT:    addi a3, a3, -241
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 2
; RV32ZBB-NEXT:    lui a4, 209715
; RV32ZBB-NEXT:    addi a4, a4, 819
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    and a1, a1, a4
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 1
; RV32ZBB-NEXT:    lui a5, 349525
; RV32ZBB-NEXT:    addi a5, a5, 1365
; RV32ZBB-NEXT:    and a2, a2, a5
; RV32ZBB-NEXT:    and a1, a1, a5
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    or a2, a2, a1
; RV32ZBB-NEXT:    rev8 a0, a0
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    and a0, a0, a3
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    and a1, a1, a4
; RV32ZBB-NEXT:    and a0, a0, a4
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    and a1, a1, a5
; RV32ZBB-NEXT:    and a0, a0, a5
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a1, a1, a0
; RV32ZBB-NEXT:    mv a0, a2
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI6_0)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI6_0)(a1)
; RV64ZBB-NEXT:    rev8 a0, a0
; RV64ZBB-NEXT:    srli a2, a0, 4
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI6_1)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI6_1)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 2
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI6_2)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI6_2)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 1
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a1, a1
; RV32ZBKB-NEXT:    brev8 a2, a1
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a1, a0
; RV32ZBKB-NEXT:    mv a0, a2
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_i64:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev a2, a1
; RV32ZBP-NEXT:    rev a1, a0
; RV32ZBP-NEXT:    mv a0, a2
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_i64:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i64 @llvm.bitreverse.i64(i64 %a)
  ret i64 %tmp
}
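; bswap followed by bitreverse reverses the bits inside each byte while leaving
; the byte order alone. Zbkb therefore folds the pair into a single brev8 and
; Zbp into rev.b at every width; without them the masking stages are still needed.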
define i16 @test_bswap_bitreverse_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 1
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 3
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 5
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_bitreverse_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 1
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 3
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 5
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 1
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 3
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 5
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 1
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 3
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 5
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_bitreverse_i16:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_bitreverse_i16:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i16 @llvm.bswap.i16(i16 %a)
  %tmp2 = call i16 @llvm.bitreverse.i16(i16 %tmp)
  ret i16 %tmp2
}
define i32 @test_bswap_bitreverse_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 61681
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 209715
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 349525
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_bitreverse_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 61681
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 209715
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 349525
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 61681
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 209715
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 349525
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 61681
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 349525
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_bitreverse_i32:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_bitreverse_i32:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i32 @llvm.bswap.i32(i32 %a)
  %tmp2 = call i32 @llvm.bitreverse.i32(i32 %tmp)
  ret i32 %tmp2
}
define i64 @test_bswap_bitreverse_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a0, 4
; RV32I-NEXT:    lui a3, 61681
; RV32I-NEXT:    addi a3, a3, -241
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    and a0, a0, a3
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a0, 2
; RV32I-NEXT:    lui a4, 209715
; RV32I-NEXT:    addi a4, a4, 819
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    and a0, a0, a4
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a0, 1
; RV32I-NEXT:    lui a5, 349525
; RV32I-NEXT:    addi a5, a5, 1365
; RV32I-NEXT:    and a2, a2, a5
; RV32I-NEXT:    and a0, a0, a5
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a1, 4
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 2
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    and a1, a1, a4
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 1
; RV32I-NEXT:    and a2, a2, a5
; RV32I-NEXT:    and a1, a1, a5
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_bitreverse_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, %hi(.LCPI9_0)
; RV64I-NEXT:    ld a1, %lo(.LCPI9_0)(a1)
; RV64I-NEXT:    srli a2, a0, 4
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    lui a1, %hi(.LCPI9_1)
; RV64I-NEXT:    ld a1, %lo(.LCPI9_1)(a1)
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    srli a2, a0, 2
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    lui a1, %hi(.LCPI9_2)
; RV64I-NEXT:    ld a1, %lo(.LCPI9_2)(a1)
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    srli a2, a0, 1
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a2, a0, 4
; RV32ZBB-NEXT:    lui a3, 61681
; RV32ZBB-NEXT:    addi a3, a3, -241
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a0, a0, a3
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a0, 2
; RV32ZBB-NEXT:    lui a4, 209715
; RV32ZBB-NEXT:    addi a4, a4, 819
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    and a0, a0, a4
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a0, 1
; RV32ZBB-NEXT:    lui a5, 349525
; RV32ZBB-NEXT:    addi a5, a5, 1365
; RV32ZBB-NEXT:    and a2, a2, a5
; RV32ZBB-NEXT:    and a0, a0, a5
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a1, 4
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 2
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    and a1, a1, a4
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 1
; RV32ZBB-NEXT:    and a2, a2, a5
; RV32ZBB-NEXT:    and a1, a1, a5
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI9_0)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI9_0)(a1)
; RV64ZBB-NEXT:    srli a2, a0, 4
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI9_1)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI9_1)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 2
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI9_2)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI9_2)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 1
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a1, a1
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_bitreverse_i64:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    rev.b a1, a1
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_bitreverse_i64:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i64 @llvm.bswap.i64(i64 %a)
  %tmp2 = call i64 @llvm.bitreverse.i64(i64 %tmp)
  ret i64 %tmp2
}
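; The opposite composition, bitreverse followed by bswap, is the same per-byte
; bit reversal and folds in the same way.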
define i16 @test_bitreverse_bswap_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 1
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 3
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 5
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_bswap_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 1
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 3
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 5
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 1
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 3
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 5
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 1
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 3
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 5
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_bswap_i16:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_bswap_i16:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i16 @llvm.bitreverse.i16(i16 %a)
  %tmp2 = call i16 @llvm.bswap.i16(i16 %tmp)
  ret i16 %tmp2
}
define i32 @test_bitreverse_bswap_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 61681
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 209715
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 349525
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_bswap_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 61681
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 209715
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 349525
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 61681
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 209715
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 349525
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 61681
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 349525
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_bswap_i32:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_bswap_i32:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i32 @llvm.bitreverse.i32(i32 %a)
  %tmp2 = call i32 @llvm.bswap.i32(i32 %tmp)
  ret i32 %tmp2
}
define i64 @test_bitreverse_bswap_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a0, 4
; RV32I-NEXT:    lui a3, 61681
; RV32I-NEXT:    addi a3, a3, -241
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    and a0, a0, a3
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a0, 2
; RV32I-NEXT:    lui a4, 209715
; RV32I-NEXT:    addi a4, a4, 819
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    and a0, a0, a4
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a0, 1
; RV32I-NEXT:    lui a5, 349525
; RV32I-NEXT:    addi a5, a5, 1365
; RV32I-NEXT:    and a2, a2, a5
; RV32I-NEXT:    and a0, a0, a5
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a1, 4
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 2
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    and a1, a1, a4
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 1
; RV32I-NEXT:    and a2, a2, a5
; RV32I-NEXT:    and a1, a1, a5
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_bswap_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, %hi(.LCPI12_0)
; RV64I-NEXT:    ld a1, %lo(.LCPI12_0)(a1)
; RV64I-NEXT:    srli a2, a0, 4
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    lui a1, %hi(.LCPI12_1)
; RV64I-NEXT:    ld a1, %lo(.LCPI12_1)(a1)
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    srli a2, a0, 2
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    lui a1, %hi(.LCPI12_2)
; RV64I-NEXT:    ld a1, %lo(.LCPI12_2)(a1)
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    srli a2, a0, 1
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a2, a0, 4
; RV32ZBB-NEXT:    lui a3, 61681
; RV32ZBB-NEXT:    addi a3, a3, -241
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a0, a0, a3
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a0, 2
; RV32ZBB-NEXT:    lui a4, 209715
; RV32ZBB-NEXT:    addi a4, a4, 819
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    and a0, a0, a4
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a0, 1
; RV32ZBB-NEXT:    lui a5, 349525
; RV32ZBB-NEXT:    addi a5, a5, 1365
; RV32ZBB-NEXT:    and a2, a2, a5
; RV32ZBB-NEXT:    and a0, a0, a5
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a1, 4
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 2
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    and a1, a1, a4
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 1
; RV32ZBB-NEXT:    and a2, a2, a5
; RV32ZBB-NEXT:    and a1, a1, a5
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI12_0)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI12_0)(a1)
; RV64ZBB-NEXT:    srli a2, a0, 4
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI12_1)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI12_1)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 2
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI12_2)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI12_2)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 1
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a1, a1
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_bswap_i64:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    rev.b a1, a1
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_bswap_i64:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i64 @llvm.bitreverse.i64(i64 %a)
  %tmp2 = call i64 @llvm.bswap.i64(i64 %tmp)
  ret i64 %tmp2
}
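; pr55484: an i16 byte swap written as i32 shifts plus an or, then truncated
; and sign-extended. The checks guard against folding this into a wider byte
; swap: even with the Zb* extensions no rev8 is emitted, and only Zbb's sext.h
; is picked up for the sign extension.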
define i32 @pr55484(i32 %0) {
; RV32I-LABEL: pr55484:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    slli a0, a0, 8
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srai a0, a0, 16
; RV32I-NEXT:    ret
;
; RV64I-LABEL: pr55484:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 8
; RV64I-NEXT:    slli a0, a0, 8
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srai a0, a0, 48
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: pr55484:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 8
; RV32ZBB-NEXT:    slli a0, a0, 8
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    sext.h a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: pr55484:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 8
; RV64ZBB-NEXT:    slli a0, a0, 8
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: pr55484:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    srli a1, a0, 8
; RV32ZBKB-NEXT:    slli a0, a0, 8
; RV32ZBKB-NEXT:    or a0, a1, a0
; RV32ZBKB-NEXT:    slli a0, a0, 16
; RV32ZBKB-NEXT:    srai a0, a0, 16
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: pr55484:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    srli a1, a0, 8
; RV64ZBKB-NEXT:    slli a0, a0, 8
; RV64ZBKB-NEXT:    or a0, a1, a0
; RV64ZBKB-NEXT:    slli a0, a0, 48
; RV64ZBKB-NEXT:    srai a0, a0, 48
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: pr55484:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    srli a1, a0, 8
; RV32ZBP-NEXT:    slli a0, a0, 8
; RV32ZBP-NEXT:    or a0, a1, a0
; RV32ZBP-NEXT:    slli a0, a0, 16
; RV32ZBP-NEXT:    srai a0, a0, 16
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: pr55484:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    srli a1, a0, 8
; RV64ZBP-NEXT:    slli a0, a0, 8
; RV64ZBP-NEXT:    or a0, a1, a0
; RV64ZBP-NEXT:    slli a0, a0, 48
; RV64ZBP-NEXT:    srai a0, a0, 48
; RV64ZBP-NEXT:    ret
  %2 = lshr i32 %0, 8
  %3 = shl i32 %0, 8
  %4 = or i32 %2, %3
  %5 = trunc i32 %4 to i16
  %6 = sext i16 %5 to i32
  ret i32 %6
}