; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py ; RUN: llc < %s -mtriple=i686-unknown-unknown | FileCheck %s --check-prefix=X86 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefix=X64 define i129 @udiv129(i129 %a, i129 %b) nounwind { ; X86-LABEL: udiv129: ; X86: # %bb.0: ; X86-NEXT: pushl %ebp ; X86-NEXT: movl %esp, %ebp ; X86-NEXT: pushl %ebx ; X86-NEXT: pushl %edi ; X86-NEXT: pushl %esi ; X86-NEXT: andl $-8, %esp ; X86-NEXT: subl $104, %esp ; X86-NEXT: movl 24(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 20(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 16(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 12(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 40(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 44(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 32(%ebp), %eax ; X86-NEXT: movl %eax, (%esp) ; X86-NEXT: movl 36(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 28(%ebp), %eax ; X86-NEXT: andl $1, %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 48(%ebp), %eax ; X86-NEXT: andl $1, %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 8(%ebp), %esi ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl %esp, %eax ; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx ; X86-NEXT: leal {{[0-9]+}}(%esp), %edx ; X86-NEXT: pushl $256 # imm = 0x100 ; X86-NEXT: pushl %eax ; X86-NEXT: pushl %ecx ; X86-NEXT: pushl %edx ; X86-NEXT: calll __udivei4 ; X86-NEXT: addl $16, %esp ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx ; 
X86-NEXT: movl %ebx, 12(%esi) ; X86-NEXT: movl %edi, 8(%esi) ; X86-NEXT: movl %edx, 4(%esi) ; X86-NEXT: movl %ecx, (%esi) ; X86-NEXT: andl $1, %eax ; X86-NEXT: movb %al, 16(%esi) ; X86-NEXT: movl %esi, %eax ; X86-NEXT: leal -12(%ebp), %esp ; X86-NEXT: popl %esi ; X86-NEXT: popl %edi ; X86-NEXT: popl %ebx ; X86-NEXT: popl %ebp ; X86-NEXT: retl $4 ; ; X64-LABEL: udiv129: ; X64: # %bb.0: ; X64-NEXT: subq $104, %rsp ; X64-NEXT: andl $1, %r9d ; X64-NEXT: andl $1, %edx ; X64-NEXT: movq %rsi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rcx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r8, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: movq $0, {{[0-9]+}}(%rsp) ; X64-NEXT: movq $0, {{[0-9]+}}(%rsp) ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rsi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx ; X64-NEXT: movl $256, %ecx # imm = 0x100 ; X64-NEXT: callq __udivei4@PLT ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rcx ; X64-NEXT: andl $1, %ecx ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdx ; X64-NEXT: addq $104, %rsp ; X64-NEXT: retq %res = udiv i129 %a, %b ret i129 %res } define i129 @urem129(i129 %a, i129 %b) nounwind { ; X86-LABEL: urem129: ; X86: # %bb.0: ; X86-NEXT: pushl %ebp ; X86-NEXT: movl %esp, %ebp ; X86-NEXT: pushl %ebx ; X86-NEXT: pushl %edi ; X86-NEXT: pushl %esi ; X86-NEXT: andl $-8, %esp ; X86-NEXT: subl $104, %esp ; X86-NEXT: movl 24(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 20(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 16(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 12(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 40(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 44(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 32(%ebp), %eax ; X86-NEXT: movl %eax, (%esp) ; X86-NEXT: movl 36(%ebp), %eax ; 
X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 28(%ebp), %eax ; X86-NEXT: andl $1, %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 48(%ebp), %eax ; X86-NEXT: andl $1, %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 8(%ebp), %esi ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl $0, {{[0-9]+}}(%esp) ; X86-NEXT: movl %esp, %eax ; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx ; X86-NEXT: leal {{[0-9]+}}(%esp), %edx ; X86-NEXT: pushl $256 # imm = 0x100 ; X86-NEXT: pushl %eax ; X86-NEXT: pushl %ecx ; X86-NEXT: pushl %edx ; X86-NEXT: calll __umodei4 ; X86-NEXT: addl $16, %esp ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx ; X86-NEXT: movl %ebx, 12(%esi) ; X86-NEXT: movl %edi, 8(%esi) ; X86-NEXT: movl %edx, 4(%esi) ; X86-NEXT: movl %ecx, (%esi) ; X86-NEXT: andl $1, %eax ; X86-NEXT: movb %al, 16(%esi) ; X86-NEXT: movl %esi, %eax ; X86-NEXT: leal -12(%ebp), %esp ; X86-NEXT: popl %esi ; X86-NEXT: popl %edi ; X86-NEXT: popl %ebx ; X86-NEXT: popl %ebp ; X86-NEXT: retl $4 ; ; X64-LABEL: urem129: ; X64: # %bb.0: ; X64-NEXT: subq $104, %rsp ; X64-NEXT: andl $1, %r9d ; X64-NEXT: andl $1, %edx ; X64-NEXT: movq %rsi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rcx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r8, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: movq $0, {{[0-9]+}}(%rsp) ; X64-NEXT: movq $0, {{[0-9]+}}(%rsp) ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rsi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx ; X64-NEXT: movl $256, %ecx # imm = 0x100 ; X64-NEXT: callq __umodei4@PLT ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rcx ; X64-NEXT: andl 
$1, %ecx ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdx ; X64-NEXT: addq $104, %rsp ; X64-NEXT: retq %res = urem i129 %a, %b ret i129 %res } define i129 @sdiv129(i129 %a, i129 %b) nounwind { ; X86-LABEL: sdiv129: ; X86: # %bb.0: ; X86-NEXT: pushl %ebp ; X86-NEXT: movl %esp, %ebp ; X86-NEXT: pushl %ebx ; X86-NEXT: pushl %edi ; X86-NEXT: pushl %esi ; X86-NEXT: andl $-8, %esp ; X86-NEXT: subl $104, %esp ; X86-NEXT: movl 24(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 20(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 16(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 12(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 40(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 44(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 32(%ebp), %eax ; X86-NEXT: movl %eax, (%esp) ; X86-NEXT: movl 36(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 28(%ebp), %eax ; X86-NEXT: andl $1, %eax ; X86-NEXT: negl %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 48(%ebp), %eax ; X86-NEXT: andl $1, %eax ; X86-NEXT: negl %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 8(%ebp), %esi ; X86-NEXT: movl %esp, %eax ; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx ; X86-NEXT: leal {{[0-9]+}}(%esp), %edx ; X86-NEXT: pushl $256 # imm = 0x100 ; X86-NEXT: pushl %eax ; X86-NEXT: pushl %ecx ; X86-NEXT: pushl %edx ; X86-NEXT: calll __divei4 ; X86-NEXT: addl $16, %esp ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx ; X86-NEXT: movl 
%ebx, 12(%esi) ; X86-NEXT: movl %edi, 8(%esi) ; X86-NEXT: movl %edx, 4(%esi) ; X86-NEXT: movl %ecx, (%esi) ; X86-NEXT: andl $1, %eax ; X86-NEXT: movb %al, 16(%esi) ; X86-NEXT: movl %esi, %eax ; X86-NEXT: leal -12(%ebp), %esp ; X86-NEXT: popl %esi ; X86-NEXT: popl %edi ; X86-NEXT: popl %ebx ; X86-NEXT: popl %ebp ; X86-NEXT: retl $4 ; ; X64-LABEL: sdiv129: ; X64: # %bb.0: ; X64-NEXT: subq $104, %rsp ; X64-NEXT: andl $1, %r9d ; X64-NEXT: negq %r9 ; X64-NEXT: andl $1, %edx ; X64-NEXT: negq %rdx ; X64-NEXT: movq %rsi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rcx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r8, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rsi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx ; X64-NEXT: movl $256, %ecx # imm = 0x100 ; X64-NEXT: callq __divei4@PLT ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rcx ; X64-NEXT: andl $1, %ecx ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdx ; X64-NEXT: addq $104, %rsp ; X64-NEXT: retq %res = sdiv i129 %a, %b ret i129 %res } define i129 @srem129(i129 %a, i129 %b) nounwind { ; X86-LABEL: srem129: ; X86: # %bb.0: ; X86-NEXT: pushl %ebp ; X86-NEXT: movl %esp, %ebp ; X86-NEXT: pushl %ebx ; X86-NEXT: pushl %edi ; X86-NEXT: pushl %esi ; X86-NEXT: andl $-8, %esp ; X86-NEXT: subl $104, %esp ; X86-NEXT: movl 24(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 20(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 16(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 12(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 40(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 44(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 32(%ebp), %eax ; X86-NEXT: movl %eax, (%esp) ; 
X86-NEXT: movl 36(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 28(%ebp), %eax ; X86-NEXT: andl $1, %eax ; X86-NEXT: negl %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 48(%ebp), %eax ; X86-NEXT: andl $1, %eax ; X86-NEXT: negl %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 8(%ebp), %esi ; X86-NEXT: movl %esp, %eax ; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx ; X86-NEXT: leal {{[0-9]+}}(%esp), %edx ; X86-NEXT: pushl $256 # imm = 0x100 ; X86-NEXT: pushl %eax ; X86-NEXT: pushl %ecx ; X86-NEXT: pushl %edx ; X86-NEXT: calll __modei4 ; X86-NEXT: addl $16, %esp ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx ; X86-NEXT: movl %ebx, 12(%esi) ; X86-NEXT: movl %edi, 8(%esi) ; X86-NEXT: movl %edx, 4(%esi) ; X86-NEXT: movl %ecx, (%esi) ; X86-NEXT: andl $1, %eax ; X86-NEXT: movb %al, 16(%esi) ; X86-NEXT: movl %esi, %eax ; X86-NEXT: leal -12(%ebp), %esp ; X86-NEXT: popl %esi ; X86-NEXT: popl %edi ; X86-NEXT: popl %ebx ; X86-NEXT: popl %ebp ; X86-NEXT: retl $4 ; ; X64-LABEL: srem129: ; X64: # %bb.0: ; X64-NEXT: subq $104, %rsp ; X64-NEXT: andl $1, %r9d ; X64-NEXT: negq %r9 ; X64-NEXT: andl $1, %edx ; X64-NEXT: negq %rdx ; X64-NEXT: movq %rsi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rcx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r8, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rsi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx 
; X64-NEXT: movl $256, %ecx # imm = 0x100 ; X64-NEXT: callq __modei4@PLT ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rcx ; X64-NEXT: andl $1, %ecx ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdx ; X64-NEXT: addq $104, %rsp ; X64-NEXT: retq %res = srem i129 %a, %b ret i129 %res } ; Some higher sizes define i257 @sdiv257(i257 %a, i257 %b) nounwind { ; X86-LABEL: sdiv257: ; X86: # %bb.0: ; X86-NEXT: pushl %ebp ; X86-NEXT: movl %esp, %ebp ; X86-NEXT: pushl %ebx ; X86-NEXT: pushl %edi ; X86-NEXT: pushl %esi ; X86-NEXT: andl $-8, %esp ; X86-NEXT: subl $216, %esp ; X86-NEXT: movl 40(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 36(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 32(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 28(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 24(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 20(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 16(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 12(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 72(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 76(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 64(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 68(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 56(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 60(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 48(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 52(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 44(%ebp), %eax ; X86-NEXT: andl $1, %eax ; X86-NEXT: negl %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 
%eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 80(%ebp), %eax ; X86-NEXT: andl $1, %eax ; X86-NEXT: negl %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 8(%ebp), %esi ; X86-NEXT: leal {{[0-9]+}}(%esp), %eax ; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx ; X86-NEXT: leal {{[0-9]+}}(%esp), %edx ; X86-NEXT: pushl $512 # imm = 0x200 ; X86-NEXT: pushl %eax ; X86-NEXT: pushl %ecx ; X86-NEXT: pushl %edx ; X86-NEXT: calll __divei4 ; X86-NEXT: addl $16, %esp ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx ; X86-NEXT: movl %ebx, 28(%esi) ; X86-NEXT: movl %ecx, 24(%esi) ; X86-NEXT: movl %edx, 20(%esi) ; X86-NEXT: movl %edi, 16(%esi) ; X86-NEXT: movl (%esp), %ecx # 4-byte Reload ; X86-NEXT: movl %ecx, 12(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload ; X86-NEXT: movl %ecx, 8(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload ; X86-NEXT: movl %ecx, 4(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload ; X86-NEXT: movl %ecx, (%esi) ; X86-NEXT: andl $1, %eax ; X86-NEXT: movb %al, 32(%esi) ; 
X86-NEXT: movl %esi, %eax ; X86-NEXT: leal -12(%ebp), %esp ; X86-NEXT: popl %esi ; X86-NEXT: popl %edi ; X86-NEXT: popl %ebx ; X86-NEXT: popl %ebp ; X86-NEXT: retl $4 ; ; X64-LABEL: sdiv257: ; X64: # %bb.0: ; X64-NEXT: pushq %r14 ; X64-NEXT: pushq %rbx ; X64-NEXT: subq $200, %rsp ; X64-NEXT: movq %rdi, %rbx ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: andl $1, %eax ; X64-NEXT: negq %rax ; X64-NEXT: andl $1, %r9d ; X64-NEXT: negq %r9 ; X64-NEXT: movq {{[0-9]+}}(%rsp), %r11 ; X64-NEXT: movq {{[0-9]+}}(%rsp), %r10 ; X64-NEXT: movq {{[0-9]+}}(%rsp), %r14 ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdi ; X64-NEXT: movq %r8, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rcx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rsi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r14, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r11, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r10, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rsi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx ; X64-NEXT: movl $512, %ecx # imm = 0x200 ; X64-NEXT: callq __divei4@PLT ; X64-NEXT: movl {{[0-9]+}}(%rsp), %eax ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rcx ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdx ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rsi ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdi ; X64-NEXT: movq %rdi, 24(%rbx) ; X64-NEXT: movq %rsi, 16(%rbx) ; X64-NEXT: movq %rdx, 8(%rbx) ; X64-NEXT: movq %rcx, (%rbx) ; X64-NEXT: andl $1, %eax ; X64-NEXT: movb %al, 32(%rbx) ; X64-NEXT: movq %rbx, %rax ; X64-NEXT: addq $200, %rsp ; X64-NEXT: popq %rbx ; X64-NEXT: popq %r14 ; X64-NEXT: retq %res = sdiv i257 %a, %b ret i257 %res } define i1001 @srem1001(i1001 
%a, i1001 %b) nounwind { ; X86-LABEL: srem1001: ; X86: # %bb.0: ; X86-NEXT: pushl %ebp ; X86-NEXT: movl %esp, %ebp ; X86-NEXT: pushl %ebx ; X86-NEXT: pushl %edi ; X86-NEXT: pushl %esi ; X86-NEXT: andl $-8, %esp ; X86-NEXT: subl $496, %esp # imm = 0x1F0 ; X86-NEXT: movl 132(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 128(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 124(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 120(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 116(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 112(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 108(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 104(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 100(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 96(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 92(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 88(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 84(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 80(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 76(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 72(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 68(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 64(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 60(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 56(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 52(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 48(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 44(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 40(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; 
X86-NEXT: movl 36(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 32(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 28(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 24(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 20(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 16(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 12(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 136(%ebp), %eax ; X86-NEXT: shll $23, %eax ; X86-NEXT: sarl $23, %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 264(%ebp), %eax ; X86-NEXT: shll $23, %eax ; X86-NEXT: sarl $23, %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 260(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 256(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 252(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 248(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 244(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 240(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 236(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 232(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 228(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 224(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 220(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 216(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 212(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 208(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 204(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 200(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 196(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; 
X86-NEXT: movl 192(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 188(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 184(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 180(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 176(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 172(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 168(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 164(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 160(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 156(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 152(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 148(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 144(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 140(%ebp), %eax ; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) ; X86-NEXT: movl 8(%ebp), %esi ; X86-NEXT: leal {{[0-9]+}}(%esp), %eax ; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx ; X86-NEXT: leal {{[0-9]+}}(%esp), %edx ; X86-NEXT: pushl $1024 # imm = 0x400 ; X86-NEXT: pushl %eax ; X86-NEXT: pushl %ecx ; X86-NEXT: pushl %edx ; X86-NEXT: calll __modei4 ; X86-NEXT: addl $16, %esp ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; 
X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl %eax, (%esp) # 4-byte Spill 
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx ; X86-NEXT: movl %ebx, 120(%esi) ; X86-NEXT: movl %eax, 116(%esi) ; X86-NEXT: movl %ecx, 112(%esi) ; X86-NEXT: movl %edx, 108(%esi) ; X86-NEXT: movl %edi, 104(%esi) ; X86-NEXT: movl (%esp), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 100(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 96(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 92(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 88(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 84(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 80(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 76(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 72(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 68(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 64(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 60(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 56(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 52(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 48(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 44(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 40(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 36(%esi) ; X86-NEXT: movl 
{{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 32(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 28(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 24(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 20(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 16(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 12(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 8(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, 4(%esi) ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload ; X86-NEXT: movl %eax, (%esi) ; X86-NEXT: movl $511, %eax # imm = 0x1FF ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movw %ax, 124(%esi) ; X86-NEXT: movl %esi, %eax ; X86-NEXT: leal -12(%ebp), %esp ; X86-NEXT: popl %esi ; X86-NEXT: popl %edi ; X86-NEXT: popl %ebx ; X86-NEXT: popl %ebp ; X86-NEXT: retl $4 ; ; X64-LABEL: srem1001: ; X64: # %bb.0: ; X64-NEXT: pushq %rbp ; X64-NEXT: pushq %r15 ; X64-NEXT: pushq %r14 ; X64-NEXT: pushq %r13 ; X64-NEXT: pushq %r12 ; X64-NEXT: pushq %rbx ; X64-NEXT: subq $408, %rsp # imm = 0x198 ; X64-NEXT: movq %rdi, %rbx ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, 
{{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %r8, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rcx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rdx, {{[0-9]+}}(%rsp) ; X64-NEXT: movq %rsi, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: shlq $23, %rax ; X64-NEXT: sarq $23, %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax ; X64-NEXT: shlq $23, %rax ; X64-NEXT: sarq $23, %rax ; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp) ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rsi ; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx ; X64-NEXT: movl $1024, %ecx # imm = 
0x400
; NOTE(review): the checks below are the tail of an autogenerated block for a
; wider srem test (the IR returns i1001) whose 'define' lies outside this
; chunk. The remainder comes back from the __modei4 libcall; %rbx appears to
; hold the returned-struct pointer (result stored at 0..124(%rbx), then
; movq %rbx, %rax) — confirm against the full test file.
; X64-NEXT: callq __modei4@PLT
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rcx
; X64-NEXT: movq %rcx, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rcx
; X64-NEXT: movq %rcx, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; X64-NEXT: movq {{[0-9]+}}(%rsp), %r10
; X64-NEXT: movq {{[0-9]+}}(%rsp), %r11
; X64-NEXT: movq {{[0-9]+}}(%rsp), %r14
; X64-NEXT: movq {{[0-9]+}}(%rsp), %r15
; X64-NEXT: movq {{[0-9]+}}(%rsp), %r12
; X64-NEXT: movq {{[0-9]+}}(%rsp), %r13
; X64-NEXT: movq {{[0-9]+}}(%rsp), %r8
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdx
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rsi
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdi
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rbp
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rcx
; X64-NEXT: movq {{[0-9]+}}(%rsp), %r9
; X64-NEXT: movq %r9, 112(%rbx)
; X64-NEXT: movq %rcx, 104(%rbx)
; X64-NEXT: movq %rbp, 96(%rbx)
; X64-NEXT: movq %rdi, 88(%rbx)
; X64-NEXT: movq %rsi, 80(%rbx)
; X64-NEXT: movq %rdx, 72(%rbx)
; X64-NEXT: movq %r8, 64(%rbx)
; X64-NEXT: movq %r13, 56(%rbx)
; X64-NEXT: movq %r12, 48(%rbx)
; X64-NEXT: movq %r15, 40(%rbx)
; X64-NEXT: movq %r14, 32(%rbx)
; X64-NEXT: movq %r11, 24(%rbx)
; X64-NEXT: movq %r10, 16(%rbx)
; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rcx # 8-byte Reload
; X64-NEXT: movq %rcx, 8(%rbx)
; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rcx # 8-byte Reload
; X64-NEXT: movq %rcx, (%rbx)
; X64-NEXT: movl %eax, 120(%rbx)
; X64-NEXT: shrq $32, %rax
; X64-NEXT: andl $511, %eax # imm = 0x1FF
; X64-NEXT: movw %ax, 124(%rbx)
; X64-NEXT: movq %rbx, %rax
; X64-NEXT: addq $408, %rsp # imm = 0x198
; X64-NEXT: popq %rbx
; X64-NEXT: popq %r12
; X64-NEXT: popq %r13
; X64-NEXT: popq %r14
; X64-NEXT: popq %r15
; X64-NEXT: popq %rbp
; X64-NEXT: retq
  %res = srem i1001 %a, %b
  ret i1001 %res
}

; chain129: i129 udiv of the two arguments followed by sdiv of the quotient
; by 17. Both divisions are expanded to memory-based libcalls with the i129
; operands widened to 256 bits (the 256 passed as the last argument to
; __udivei4 / __divei4). Between the two calls the quotient's bit 128 is
; masked (andl/and $1) and negated to produce the sign-extension words that
; fill the upper part of the 256-bit sdiv operand.
define i129 @chain129(i129 %a, i129 %b) nounwind {
; X86-LABEL: chain129:
; X86: # %bb.0:
; X86-NEXT: pushl %ebp
; X86-NEXT: movl %esp, %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: andl $-8, %esp
; X86-NEXT: subl $200, %esp
; X86-NEXT: movl 24(%ebp), %eax
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl 20(%ebp), %eax
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl 16(%ebp), %eax
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl 12(%ebp), %eax
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl 40(%ebp), %eax
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl 44(%ebp), %eax
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl 32(%ebp), %eax
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl 36(%ebp), %eax
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl 28(%ebp), %eax
; X86-NEXT: andl $1, %eax
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl 48(%ebp), %eax
; X86-NEXT: andl $1, %eax
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl 8(%ebp), %esi
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx
; X86-NEXT: leal {{[0-9]+}}(%esp), %edx
; X86-NEXT: pushl $256 # imm = 0x100
; X86-NEXT: pushl %eax
; X86-NEXT: pushl %ecx
; X86-NEXT: pushl %edx
; X86-NEXT: calll __udivei4
; X86-NEXT: addl $16, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: andl $1, %eax
; X86-NEXT: negl %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl %ebx, {{[0-9]+}}(%esp)
; X86-NEXT: movl %edi, {{[0-9]+}}(%esp)
; X86-NEXT: movl %edx, {{[0-9]+}}(%esp)
; X86-NEXT: movl %ecx, {{[0-9]+}}(%esp)
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl %eax, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl $17, (%esp)
; X86-NEXT: movl $0, {{[0-9]+}}(%esp)
; X86-NEXT: movl %esp, %eax
; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx
; X86-NEXT: leal {{[0-9]+}}(%esp), %edx
; X86-NEXT: pushl $256 # imm = 0x100
; X86-NEXT: pushl %eax
; X86-NEXT: pushl %ecx
; X86-NEXT: pushl %edx
; X86-NEXT: calll __divei4
; X86-NEXT: addl $16, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl %ebx, 12(%esi)
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 4(%esi)
; X86-NEXT: movl %ecx, (%esi)
; X86-NEXT: andl $1, %eax
; X86-NEXT: movb %al, 16(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: leal -12(%ebp), %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
; X86-NEXT: retl $4
;
; X64-LABEL: chain129:
; X64: # %bb.0:
; X64-NEXT: subq $200, %rsp
; X64-NEXT: andl $1, %r9d
; X64-NEXT: andl $1, %edx
; X64-NEXT: movq %rsi, {{[0-9]+}}(%rsp)
; X64-NEXT: movq %rdi, {{[0-9]+}}(%rsp)
; X64-NEXT: movq %rcx, {{[0-9]+}}(%rsp)
; X64-NEXT: movq %r8, {{[0-9]+}}(%rsp)
; X64-NEXT: movq %rdx, {{[0-9]+}}(%rsp)
; X64-NEXT: movq %r9, {{[0-9]+}}(%rsp)
; X64-NEXT: movq $0, {{[0-9]+}}(%rsp)
; X64-NEXT: movq $0, {{[0-9]+}}(%rsp)
; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdi
; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rsi
; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx
; X64-NEXT: movl $256, %ecx # imm = 0x100
; X64-NEXT: callq __udivei4@PLT
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax
; X64-NEXT: andl $1, %eax
; X64-NEXT: negq %rax
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rcx
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdx
; X64-NEXT: movq %rdx, {{[0-9]+}}(%rsp)
; X64-NEXT: movq %rcx, {{[0-9]+}}(%rsp)
; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp)
; X64-NEXT: movq %rax, {{[0-9]+}}(%rsp)
; X64-NEXT: movq $0, {{[0-9]+}}(%rsp)
; X64-NEXT: movq $0, {{[0-9]+}}(%rsp)
; X64-NEXT: movq $17, {{[0-9]+}}(%rsp)
; X64-NEXT: movq $0, {{[0-9]+}}(%rsp)
; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdi
; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rsi
; X64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx
; X64-NEXT: movl $256, %ecx # imm = 0x100
; X64-NEXT: callq __divei4@PLT
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rcx
; X64-NEXT: andl $1, %ecx
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rax
; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdx
; X64-NEXT: addq $200, %rsp
; X64-NEXT: retq
  %res = udiv i129 %a, %b
  %res2 = sdiv i129 %res, 17
  ret i129 %res2
}