@ RUN: llvm-mc -mcpu=cortex-a8 -triple thumb-unknown-unknown -show-encoding < %s | FileCheck %s
.code 16
@ CHECK: vshl.u8 d16, d17, d16 @ encoding: [0x40,0xff,0xa1,0x04]
vshl.u8 d16, d17, d16
@ CHECK: vshl.u16 d16, d17, d16 @ encoding: [0x50,0xff,0xa1,0x04]
vshl.u16 d16, d17, d16
@ CHECK: vshl.u32 d16, d17, d16 @ encoding: [0x60,0xff,0xa1,0x04]
vshl.u32 d16, d17, d16
@ CHECK: vshl.u64 d16, d17, d16 @ encoding: [0x70,0xff,0xa1,0x04]
vshl.u64 d16, d17, d16
@ CHECK: vshl.i8 d16, d16, #7 @ encoding: [0xcf,0xef,0x30,0x05]
vshl.i8 d16, d16, #7
@ CHECK: vshl.i16 d16, d16, #15 @ encoding: [0xdf,0xef,0x30,0x05]
vshl.i16 d16, d16, #15
@ CHECK: vshl.i32 d16, d16, #31 @ encoding: [0xff,0xef,0x30,0x05]
vshl.i32 d16, d16, #31
@ CHECK: vshl.i64 d16, d16, #63 @ encoding: [0xff,0xef,0xb0,0x05]
vshl.i64 d16, d16, #63
@ CHECK: vshl.u8 q8, q9, q8 @ encoding: [0x40,0xff,0xe2,0x04]
vshl.u8 q8, q9, q8
@ CHECK: vshl.u16 q8, q9, q8 @ encoding: [0x50,0xff,0xe2,0x04]
vshl.u16 q8, q9, q8
@ CHECK: vshl.u32 q8, q9, q8 @ encoding: [0x60,0xff,0xe2,0x04]
vshl.u32 q8, q9, q8
@ CHECK: vshl.u64 q8, q9, q8 @ encoding: [0x70,0xff,0xe2,0x04]
vshl.u64 q8, q9, q8
@ CHECK: vshl.i8 q8, q8, #7 @ encoding: [0xcf,0xef,0x70,0x05]
vshl.i8 q8, q8, #7
@ CHECK: vshl.i16 q8, q8, #15 @ encoding: [0xdf,0xef,0x70,0x05]
vshl.i16 q8, q8, #15
@ CHECK: vshl.i32 q8, q8, #31 @ encoding: [0xff,0xef,0x70,0x05]
vshl.i32 q8, q8, #31
@ CHECK: vshl.i64 q8, q8, #63 @ encoding: [0xff,0xef,0xf0,0x05]
vshl.i64 q8, q8, #63
@ CHECK: vshr.u8 d16, d16, #8 @ encoding: [0xc8,0xff,0x30,0x00]
vshr.u8 d16, d16, #8
@ CHECK: vshr.u16 d16, d16, #16 @ encoding: [0xd0,0xff,0x30,0x00]
vshr.u16 d16, d16, #16
@ CHECK: vshr.u32 d16, d16, #32 @ encoding: [0xe0,0xff,0x30,0x00]
vshr.u32 d16, d16, #32
@ CHECK: vshr.u64 d16, d16, #64 @ encoding: [0xc0,0xff,0xb0,0x00]
vshr.u64 d16, d16, #64
@ CHECK: vshr.u8 q8, q8, #8 @ encoding: [0xc8,0xff,0x70,0x00]
vshr.u8 q8, q8, #8
@ CHECK: vshr.u16 q8, q8, #16 @ encoding: [0xd0,0xff,0x70,0x00]
vshr.u16 q8, q8, #16
@ CHECK: vshr.u32 q8, q8, #32 @ encoding: [0xe0,0xff,0x70,0x00]
vshr.u32 q8, q8, #32
@ CHECK: vshr.u64 q8, q8, #64 @ encoding: [0xc0,0xff,0xf0,0x00]
vshr.u64 q8, q8, #64
@ CHECK: vshr.s8 d16, d16, #8 @ encoding: [0xc8,0xef,0x30,0x00]
vshr.s8 d16, d16, #8
@ CHECK: vshr.s16 d16, d16, #16 @ encoding: [0xd0,0xef,0x30,0x00]
vshr.s16 d16, d16, #16
@ CHECK: vshr.s32 d16, d16, #32 @ encoding: [0xe0,0xef,0x30,0x00]
vshr.s32 d16, d16, #32
@ CHECK: vshr.s64 d16, d16, #64 @ encoding: [0xc0,0xef,0xb0,0x00]
vshr.s64 d16, d16, #64
@ CHECK: vshr.s8 q8, q8, #8 @ encoding: [0xc8,0xef,0x70,0x00]
vshr.s8 q8, q8, #8
@ CHECK: vshr.s16 q8, q8, #16 @ encoding: [0xd0,0xef,0x70,0x00]
vshr.s16 q8, q8, #16
@ CHECK: vshr.s32 q8, q8, #32 @ encoding: [0xe0,0xef,0x70,0x00]
vshr.s32 q8, q8, #32
@ CHECK: vshr.s64 q8, q8, #64 @ encoding: [0xc0,0xef,0xf0,0x00]
vshr.s64 q8, q8, #64
@ CHECK: vshll.s8 q8, d16, #7 @ encoding: [0xcf,0xef,0x30,0x0a]
vshll.s8 q8, d16, #7
@ CHECK: vshll.s16 q8, d16, #15 @ encoding: [0xdf,0xef,0x30,0x0a]
vshll.s16 q8, d16, #15
@ CHECK: vshll.s32 q8, d16, #31 @ encoding: [0xff,0xef,0x30,0x0a]
vshll.s32 q8, d16, #31
@ CHECK: vshll.u8 q8, d16, #7 @ encoding: [0xcf,0xff,0x30,0x0a]
vshll.u8 q8, d16, #7
@ CHECK: vshll.u16 q8, d16, #15 @ encoding: [0xdf,0xff,0x30,0x0a]
vshll.u16 q8, d16, #15
@ CHECK: vshll.u32 q8, d16, #31 @ encoding: [0xff,0xff,0x30,0x0a]
vshll.u32 q8, d16, #31
@ CHECK: vshll.i8 q8, d16, #8 @ encoding: [0xf2,0xff,0x20,0x03]
vshll.i8 q8, d16, #8
@ CHECK: vshll.i16 q8, d16, #16 @ encoding: [0xf6,0xff,0x20,0x03]
vshll.i16 q8, d16, #16
@ CHECK: vshll.i32 q8, d16, #32 @ encoding: [0xfa,0xff,0x20,0x03]
vshll.i32 q8, d16, #32
@ CHECK: vshrn.i16 d16, q8, #8 @ encoding: [0xc8,0xef,0x30,0x08]
vshrn.i16 d16, q8, #8
@ CHECK: vshrn.i32 d16, q8, #16 @ encoding: [0xd0,0xef,0x30,0x08]
vshrn.i32 d16, q8, #16
@ CHECK: vshrn.i64 d16, q8, #32 @ encoding: [0xe0,0xef,0x30,0x08]
vshrn.i64 d16, q8, #32
@ CHECK: vrshl.s8 d16, d17, d16 @ encoding: [0x40,0xef,0xa1,0x05]
vrshl.s8 d16, d17, d16
@ CHECK: vrshl.s16 d16, d17, d16 @ encoding: [0x50,0xef,0xa1,0x05]
vrshl.s16 d16, d17, d16
@ CHECK: vrshl.s32 d16, d17, d16 @ encoding: [0x60,0xef,0xa1,0x05]
vrshl.s32 d16, d17, d16
@ CHECK: vrshl.s64 d16, d17, d16 @ encoding: [0x70,0xef,0xa1,0x05]
vrshl.s64 d16, d17, d16
@ CHECK: vrshl.u8 d16, d17, d16 @ encoding: [0x40,0xff,0xa1,0x05]
vrshl.u8 d16, d17, d16
@ CHECK: vrshl.u16 d16, d17, d16 @ encoding: [0x50,0xff,0xa1,0x05]
vrshl.u16 d16, d17, d16
@ CHECK: vrshl.u32 d16, d17, d16 @ encoding: [0x60,0xff,0xa1,0x05]
vrshl.u32 d16, d17, d16
@ CHECK: vrshl.u64 d16, d17, d16 @ encoding: [0x70,0xff,0xa1,0x05]
vrshl.u64 d16, d17, d16
@ CHECK: vrshl.s8 q8, q9, q8 @ encoding: [0x40,0xef,0xe2,0x05]
vrshl.s8 q8, q9, q8
@ CHECK: vrshl.s16 q8, q9, q8 @ encoding: [0x50,0xef,0xe2,0x05]
vrshl.s16 q8, q9, q8
@ CHECK: vrshl.s32 q8, q9, q8 @ encoding: [0x60,0xef,0xe2,0x05]
vrshl.s32 q8, q9, q8
@ CHECK: vrshl.s64 q8, q9, q8 @ encoding: [0x70,0xef,0xe2,0x05]
vrshl.s64 q8, q9, q8
@ CHECK: vrshl.u8 q8, q9, q8 @ encoding: [0x40,0xff,0xe2,0x05]
vrshl.u8 q8, q9, q8
@ CHECK: vrshl.u16 q8, q9, q8 @ encoding: [0x50,0xff,0xe2,0x05]
vrshl.u16 q8, q9, q8
@ CHECK: vrshl.u32 q8, q9, q8 @ encoding: [0x60,0xff,0xe2,0x05]
vrshl.u32 q8, q9, q8
@ CHECK: vrshl.u64 q8, q9, q8 @ encoding: [0x70,0xff,0xe2,0x05]
vrshl.u64 q8, q9, q8
@ CHECK: vrshr.s8 d16, d16, #8 @ encoding: [0xc8,0xef,0x30,0x02]
vrshr.s8 d16, d16, #8
@ CHECK: vrshr.s16 d16, d16, #16 @ encoding: [0xd0,0xef,0x30,0x02]
vrshr.s16 d16, d16, #16
@ CHECK: vrshr.s32 d16, d16, #32 @ encoding: [0xe0,0xef,0x30,0x02]
vrshr.s32 d16, d16, #32
@ CHECK: vrshr.s64 d16, d16, #64 @ encoding: [0xc0,0xef,0xb0,0x02]
vrshr.s64 d16, d16, #64
@ CHECK: vrshr.u8 d16, d16, #8 @ encoding: [0xc8,0xff,0x30,0x02]
vrshr.u8 d16, d16, #8
@ CHECK: vrshr.u16 d16, d16, #16 @ encoding: [0xd0,0xff,0x30,0x02]
vrshr.u16 d16, d16, #16
@ CHECK: vrshr.u32 d16, d16, #32 @ encoding: [0xe0,0xff,0x30,0x02]
vrshr.u32 d16, d16, #32
@ CHECK: vrshr.u64 d16, d16, #64 @ encoding: [0xc0,0xff,0xb0,0x02]
vrshr.u64 d16, d16, #64
@ CHECK: vrshr.s8 q8, q8, #8 @ encoding: [0xc8,0xef,0x70,0x02]
vrshr.s8 q8, q8, #8
@ CHECK: vrshr.s16 q8, q8, #16 @ encoding: [0xd0,0xef,0x70,0x02]
vrshr.s16 q8, q8, #16
@ CHECK: vrshr.s32 q8, q8, #32 @ encoding: [0xe0,0xef,0x70,0x02]
vrshr.s32 q8, q8, #32
@ CHECK: vrshr.s64 q8, q8, #64 @ encoding: [0xc0,0xef,0xf0,0x02]
vrshr.s64 q8, q8, #64
@ CHECK: vrshr.u8 q8, q8, #8 @ encoding: [0xc8,0xff,0x70,0x02]
vrshr.u8 q8, q8, #8
@ CHECK: vrshr.u16 q8, q8, #16 @ encoding: [0xd0,0xff,0x70,0x02]
vrshr.u16 q8, q8, #16
@ CHECK: vrshr.u32 q8, q8, #32 @ encoding: [0xe0,0xff,0x70,0x02]
vrshr.u32 q8, q8, #32
@ CHECK: vrshr.u64 q8, q8, #64 @ encoding: [0xc0,0xff,0xf0,0x02]
vrshr.u64 q8, q8, #64
@ CHECK: vrshrn.i16 d16, q8, #8 @ encoding: [0xc8,0xef,0x70,0x08]
vrshrn.i16 d16, q8, #8
@ CHECK: vrshrn.i32 d16, q8, #16 @ encoding: [0xd0,0xef,0x70,0x08]
vrshrn.i32 d16, q8, #16
@ CHECK: vrshrn.i64 d16, q8, #32 @ encoding: [0xe0,0xef,0x70,0x08]
vrshrn.i64 d16, q8, #32