/*
 * !/usr/bin/env perl
 *
 * ====================================================================
 * Written by Andy Polyakov <appro@fy.chalmers.se> for the OpenSSL
 * project. The module is, however, dual licensed under OpenSSL and
 * CRYPTOGAMS licenses depending on where you obtain it. For further
 * details see http://www.openssl.org/~appro/cryptogams/.
 * ====================================================================
 *
 * sha1_block procedure for x86_64.
 *
 * It was brought to my attention that on EM64T compiler-generated code
 * was far behind the 32-bit assembler implementation. This is unlike
 * Opteron, where compiler-generated code was only 15% behind the 32-bit
 * assembler, which originally made it hard to motivate the effort.
 * There was a suggestion to mechanically translate the 32-bit code, but
 * I dismissed it, reasoning that x86_64 offers enough register bank
 * capacity to fully utilize SHA-1 parallelism. Therefore this fresh
 * implementation:-) However! While 64-bit code does perform better on
 * Opteron, I failed to beat the 32-bit assembler on an EM64T core.
 * Well, x86_64 does offer a larger *addressable* bank, but the
 * out-of-order core reaches for even more registers through dynamic
 * aliasing, and the EM64T core must have managed to run-time optimize
 * even 32-bit code just as well as the 64-bit version. The performance
 * improvement is summarized in the following table:
 *
 *			gcc 3.4		32-bit asm	cycles/byte
 *	Opteron		+45%		+20%		6.8
 *	Xeon P4		+65%		+0%		9.9
 *	Core2		+60%		+10%		7.0
 *
 *
 * OpenSolaris OS modifications
 *
 * Sun elects to use this software under the BSD license.
 *
 * This source originates from the OpenSSL file sha1-x86_64.pl at
 * ftp://ftp.openssl.org/snapshot/openssl-0.9.8-stable-SNAP-20080131.tar.gz
 * (presumably for the future OpenSSL release 0.9.8h), with these changes:
 *
 * 1. Added perl "use strict" and declared variables.
 *
 * 2. Added OpenSolaris ENTRY_NP/SET_SIZE macros from
 * /usr/include/sys/asm_linkage.h, .ident keywords, and lint(1B) guards.
 *
 * 3. Removed the x86_64-xlate.pl script (not needed for the as(1) or
 * gas(1) assemblers).
 *
 */

/*
 * This file was generated by a perl script (sha1-x86_64.pl). The comments
 * from the original file have been pasted above.
 */

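/*
 * For reference, the unrolled code below implements the standard SHA-1
 * compression function over 64-byte blocks, with the round constants
 * 0x5a827999, 0x6ed9eba1, 0x8f1bbcdc and 0xca62c1d6 appearing in the
 * lea instructions (the last two in their signed 32-bit forms). The
 * following minimal C sketch of the same per-block update is purely
 * illustrative; it is not part of the generated code, the function name
 * is hypothetical, and it assumes the <stdint.h> fixed-width types:
 *
 *	static void
 *	sha1_compress_sketch(uint32_t h[5], const uint8_t blk[64])
 *	{
 *		uint32_t w[80], a, b, c, d, e, f, k, tmp;
 *		int i;
 *
 *		for (i = 0; i < 16; i++)	// big-endian message load
 *			w[i] = ((uint32_t)blk[4 * i] << 24) |
 *			    ((uint32_t)blk[4 * i + 1] << 16) |
 *			    ((uint32_t)blk[4 * i + 2] << 8) |
 *			    (uint32_t)blk[4 * i + 3];
 *		for (i = 16; i < 80; i++) {	// message schedule
 *			tmp = w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16];
 *			w[i] = (tmp << 1) | (tmp >> 31);
 *		}
 *		a = h[0]; b = h[1]; c = h[2]; d = h[3]; e = h[4];
 *		for (i = 0; i < 80; i++) {
 *			if (i < 20) {
 *				f = (b & c) | (~b & d); k = 0x5a827999;
 *			} else if (i < 40) {
 *				f = b ^ c ^ d; k = 0x6ed9eba1;
 *			} else if (i < 60) {
 *				f = (b & c) | (b & d) | (c & d);
 *				k = 0x8f1bbcdc;
 *			} else {
 *				f = b ^ c ^ d; k = 0xca62c1d6;
 *			}
 *			tmp = ((a << 5) | (a >> 27)) + f + e + k + w[i];
 *			e = d;
 *			d = c;
 *			c = (b << 30) | (b >> 2);
 *			b = a;
 *			a = tmp;
 *		}
 *		h[0] += a; h[1] += b; h[2] += c; h[3] += d; h[4] += e;
 *	}
 *
 * The assembler below keeps only a 16-word rolling window of w[] in the
 * 64-byte scratch area at (%rsp) instead of the full 80-word array.
 */
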
#if defined(lint) || defined(__lint)
#include <sys/stdint.h>
#include <sys/sha1.h>

/* ARGSUSED */
void
sha1_block_data_order(SHA1_CTX *ctx, const void *inpp, size_t blocks)
{
}

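/*
 * The lint stub above documents the C-callable interface of the
 * assembler routine defined below. A purely illustrative caller (the
 * helper name is hypothetical) could hand it whole 64-byte blocks,
 * relying on the fact that the routine reads and writes the five 32-bit
 * chaining values at the start of SHA1_CTX, exactly as the
 * 0(%r8)..16(%r8) accesses below do:
 *
 *	void
 *	sha1_digest_full_blocks(SHA1_CTX *ctx, const uint8_t *data,
 *	    size_t nbytes)
 *	{
 *		// Any partial trailing block must be buffered by the
 *		// caller; only whole 64-byte blocks are passed down.
 *		sha1_block_data_order(ctx, data, nbytes / 64);
 *	}
 */
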
#else
#define _ASM
#include <sys/asm_linkage.h>
ENTRY_NP(sha1_block_data_order)
	push	%rbx
	push	%rbp
	push	%r12
	mov	%rsp,%rax
	mov	%rdi,%r8	# reassigned argument
	sub	$72,%rsp
	mov	%rsi,%r9	# reassigned argument
	and	$-64,%rsp
	mov	%rdx,%r10	# reassigned argument
	mov	%rax,64(%rsp)

	mov	0(%r8),%edx
	mov	4(%r8),%esi
	mov	8(%r8),%edi
	mov	12(%r8),%ebp
	mov	16(%r8),%r11d
	.align	4
.Lloop:
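	/*
	 * Each pass through .Lloop consumes one 64-byte block from
	 * (%r9): the 16 big-endian message words are byte-swapped and
	 * stored in the 64-byte scratch area at (%rsp), which later
	 * rounds recycle in place as a rolling 16-word schedule. The
	 * working variables a..e start out in %edx, %esi, %edi, %ebp
	 * and %r11d and rotate through %r12d as the rounds are unrolled.
	 * Rounds 0-19 use F(b,c,d) = (b & c) | (~b & d) with the
	 * constant 0x5a827999.
	 */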
mov 0(%r9),%eax
|
|
bswap %eax
|
|
mov %eax,0(%rsp)
|
|
lea 0x5a827999(%eax,%r11d),%r12d
|
|
mov %edi,%ebx
|
|
mov 4(%r9),%eax
|
|
mov %edx,%r11d
|
|
xor %ebp,%ebx
|
|
bswap %eax
|
|
rol $5,%r11d
|
|
and %esi,%ebx
|
|
mov %eax,4(%rsp)
|
|
add %r11d,%r12d
|
|
xor %ebp,%ebx
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
lea 0x5a827999(%eax,%ebp),%r11d
|
|
mov %esi,%ebx
|
|
mov 8(%r9),%eax
|
|
mov %r12d,%ebp
|
|
xor %edi,%ebx
|
|
bswap %eax
|
|
rol $5,%ebp
|
|
and %edx,%ebx
|
|
mov %eax,8(%rsp)
|
|
add %ebp,%r11d
|
|
xor %edi,%ebx
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
lea 0x5a827999(%eax,%edi),%ebp
|
|
mov %edx,%ebx
|
|
mov 12(%r9),%eax
|
|
mov %r11d,%edi
|
|
xor %esi,%ebx
|
|
bswap %eax
|
|
rol $5,%edi
|
|
and %r12d,%ebx
|
|
mov %eax,12(%rsp)
|
|
add %edi,%ebp
|
|
xor %esi,%ebx
|
|
rol $30,%r12d
|
|
add %ebx,%ebp
|
|
lea 0x5a827999(%eax,%esi),%edi
|
|
mov %r12d,%ebx
|
|
mov 16(%r9),%eax
|
|
mov %ebp,%esi
|
|
xor %edx,%ebx
|
|
bswap %eax
|
|
rol $5,%esi
|
|
and %r11d,%ebx
|
|
mov %eax,16(%rsp)
|
|
add %esi,%edi
|
|
xor %edx,%ebx
|
|
rol $30,%r11d
|
|
add %ebx,%edi
|
|
lea 0x5a827999(%eax,%edx),%esi
|
|
mov %r11d,%ebx
|
|
mov 20(%r9),%eax
|
|
mov %edi,%edx
|
|
xor %r12d,%ebx
|
|
bswap %eax
|
|
rol $5,%edx
|
|
and %ebp,%ebx
|
|
mov %eax,20(%rsp)
|
|
add %edx,%esi
|
|
xor %r12d,%ebx
|
|
rol $30,%ebp
|
|
add %ebx,%esi
|
|
lea 0x5a827999(%eax,%r12d),%edx
|
|
mov %ebp,%ebx
|
|
mov 24(%r9),%eax
|
|
mov %esi,%r12d
|
|
xor %r11d,%ebx
|
|
bswap %eax
|
|
rol $5,%r12d
|
|
and %edi,%ebx
|
|
mov %eax,24(%rsp)
|
|
add %r12d,%edx
|
|
xor %r11d,%ebx
|
|
rol $30,%edi
|
|
add %ebx,%edx
|
|
lea 0x5a827999(%eax,%r11d),%r12d
|
|
mov %edi,%ebx
|
|
mov 28(%r9),%eax
|
|
mov %edx,%r11d
|
|
xor %ebp,%ebx
|
|
bswap %eax
|
|
rol $5,%r11d
|
|
and %esi,%ebx
|
|
mov %eax,28(%rsp)
|
|
add %r11d,%r12d
|
|
xor %ebp,%ebx
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
lea 0x5a827999(%eax,%ebp),%r11d
|
|
mov %esi,%ebx
|
|
mov 32(%r9),%eax
|
|
mov %r12d,%ebp
|
|
xor %edi,%ebx
|
|
bswap %eax
|
|
rol $5,%ebp
|
|
and %edx,%ebx
|
|
mov %eax,32(%rsp)
|
|
add %ebp,%r11d
|
|
xor %edi,%ebx
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
lea 0x5a827999(%eax,%edi),%ebp
|
|
mov %edx,%ebx
|
|
mov 36(%r9),%eax
|
|
mov %r11d,%edi
|
|
xor %esi,%ebx
|
|
bswap %eax
|
|
rol $5,%edi
|
|
and %r12d,%ebx
|
|
mov %eax,36(%rsp)
|
|
add %edi,%ebp
|
|
xor %esi,%ebx
|
|
rol $30,%r12d
|
|
add %ebx,%ebp
|
|
lea 0x5a827999(%eax,%esi),%edi
|
|
mov %r12d,%ebx
|
|
mov 40(%r9),%eax
|
|
mov %ebp,%esi
|
|
xor %edx,%ebx
|
|
bswap %eax
|
|
rol $5,%esi
|
|
and %r11d,%ebx
|
|
mov %eax,40(%rsp)
|
|
add %esi,%edi
|
|
xor %edx,%ebx
|
|
rol $30,%r11d
|
|
add %ebx,%edi
|
|
lea 0x5a827999(%eax,%edx),%esi
|
|
mov %r11d,%ebx
|
|
mov 44(%r9),%eax
|
|
mov %edi,%edx
|
|
xor %r12d,%ebx
|
|
bswap %eax
|
|
rol $5,%edx
|
|
and %ebp,%ebx
|
|
mov %eax,44(%rsp)
|
|
add %edx,%esi
|
|
xor %r12d,%ebx
|
|
rol $30,%ebp
|
|
add %ebx,%esi
|
|
lea 0x5a827999(%eax,%r12d),%edx
|
|
mov %ebp,%ebx
|
|
mov 48(%r9),%eax
|
|
mov %esi,%r12d
|
|
xor %r11d,%ebx
|
|
bswap %eax
|
|
rol $5,%r12d
|
|
and %edi,%ebx
|
|
mov %eax,48(%rsp)
|
|
add %r12d,%edx
|
|
xor %r11d,%ebx
|
|
rol $30,%edi
|
|
add %ebx,%edx
|
|
lea 0x5a827999(%eax,%r11d),%r12d
|
|
mov %edi,%ebx
|
|
mov 52(%r9),%eax
|
|
mov %edx,%r11d
|
|
xor %ebp,%ebx
|
|
bswap %eax
|
|
rol $5,%r11d
|
|
and %esi,%ebx
|
|
mov %eax,52(%rsp)
|
|
add %r11d,%r12d
|
|
xor %ebp,%ebx
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
lea 0x5a827999(%eax,%ebp),%r11d
|
|
mov %esi,%ebx
|
|
mov 56(%r9),%eax
|
|
mov %r12d,%ebp
|
|
xor %edi,%ebx
|
|
bswap %eax
|
|
rol $5,%ebp
|
|
and %edx,%ebx
|
|
mov %eax,56(%rsp)
|
|
add %ebp,%r11d
|
|
xor %edi,%ebx
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
lea 0x5a827999(%eax,%edi),%ebp
|
|
mov %edx,%ebx
|
|
mov 60(%r9),%eax
|
|
mov %r11d,%edi
|
|
xor %esi,%ebx
|
|
bswap %eax
|
|
rol $5,%edi
|
|
and %r12d,%ebx
|
|
mov %eax,60(%rsp)
|
|
add %edi,%ebp
|
|
xor %esi,%ebx
|
|
rol $30,%r12d
|
|
add %ebx,%ebp
|
|
lea 0x5a827999(%eax,%esi),%edi
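	/*
	 * From round 16 onward the 16-word block at (%rsp) is recycled
	 * as the message schedule: each round recomputes its word as
	 * w[i] = rol(w[i-3] ^ w[i-8] ^ w[i-14] ^ w[i-16], 1), stores it
	 * back, and folds it into the round via the lea.
	 */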
mov 0(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %ebp,%esi
|
|
xor 8(%rsp),%eax
|
|
xor %edx,%ebx
|
|
rol $5,%esi
|
|
xor 32(%rsp),%eax
|
|
and %r11d,%ebx
|
|
add %esi,%edi
|
|
xor 52(%rsp),%eax
|
|
xor %edx,%ebx
|
|
rol $30,%r11d
|
|
add %ebx,%edi
|
|
rol $1,%eax
|
|
mov %eax,0(%rsp)
|
|
lea 0x5a827999(%eax,%edx),%esi
|
|
mov 4(%rsp),%eax
|
|
mov %r11d,%ebx
|
|
mov %edi,%edx
|
|
xor 12(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
rol $5,%edx
|
|
xor 36(%rsp),%eax
|
|
and %ebp,%ebx
|
|
add %edx,%esi
|
|
xor 56(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
rol $30,%ebp
|
|
add %ebx,%esi
|
|
rol $1,%eax
|
|
mov %eax,4(%rsp)
|
|
lea 0x5a827999(%eax,%r12d),%edx
|
|
mov 8(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %esi,%r12d
|
|
xor 16(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
rol $5,%r12d
|
|
xor 40(%rsp),%eax
|
|
and %edi,%ebx
|
|
add %r12d,%edx
|
|
xor 60(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
rol $30,%edi
|
|
add %ebx,%edx
|
|
rol $1,%eax
|
|
mov %eax,8(%rsp)
|
|
lea 0x5a827999(%eax,%r11d),%r12d
|
|
mov 12(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edx,%r11d
|
|
xor 20(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
rol $5,%r11d
|
|
xor 44(%rsp),%eax
|
|
and %esi,%ebx
|
|
add %r11d,%r12d
|
|
xor 0(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
rol $1,%eax
|
|
mov %eax,12(%rsp)
|
|
lea 0x5a827999(%eax,%ebp),%r11d
|
|
mov 16(%rsp),%eax
|
|
mov %esi,%ebx
|
|
mov %r12d,%ebp
|
|
xor 24(%rsp),%eax
|
|
xor %edi,%ebx
|
|
rol $5,%ebp
|
|
xor 48(%rsp),%eax
|
|
and %edx,%ebx
|
|
add %ebp,%r11d
|
|
xor 4(%rsp),%eax
|
|
xor %edi,%ebx
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
rol $1,%eax
|
|
mov %eax,16(%rsp)
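	/*
	 * Rounds 20-39: F(b,c,d) = b ^ c ^ d, constant 0x6ed9eba1.
	 */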
lea 0x6ed9eba1(%eax,%edi),%ebp
|
|
mov 20(%rsp),%eax
|
|
mov %edx,%ebx
|
|
mov %r11d,%edi
|
|
xor 28(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
rol $5,%edi
|
|
xor 52(%rsp),%eax
|
|
xor %esi,%ebx
|
|
add %edi,%ebp
|
|
xor 8(%rsp),%eax
|
|
rol $30,%r12d
|
|
add %ebx,%ebp
|
|
rol $1,%eax
|
|
mov %eax,20(%rsp)
|
|
lea 0x6ed9eba1(%eax,%esi),%edi
|
|
mov 24(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %ebp,%esi
|
|
xor 32(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
rol $5,%esi
|
|
xor 56(%rsp),%eax
|
|
xor %edx,%ebx
|
|
add %esi,%edi
|
|
xor 12(%rsp),%eax
|
|
rol $30,%r11d
|
|
add %ebx,%edi
|
|
rol $1,%eax
|
|
mov %eax,24(%rsp)
|
|
lea 0x6ed9eba1(%eax,%edx),%esi
|
|
mov 28(%rsp),%eax
|
|
mov %r11d,%ebx
|
|
mov %edi,%edx
|
|
xor 36(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
rol $5,%edx
|
|
xor 60(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
add %edx,%esi
|
|
xor 16(%rsp),%eax
|
|
rol $30,%ebp
|
|
add %ebx,%esi
|
|
rol $1,%eax
|
|
mov %eax,28(%rsp)
|
|
lea 0x6ed9eba1(%eax,%r12d),%edx
|
|
mov 32(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %esi,%r12d
|
|
xor 40(%rsp),%eax
|
|
xor %edi,%ebx
|
|
rol $5,%r12d
|
|
xor 0(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
add %r12d,%edx
|
|
xor 20(%rsp),%eax
|
|
rol $30,%edi
|
|
add %ebx,%edx
|
|
rol $1,%eax
|
|
mov %eax,32(%rsp)
|
|
lea 0x6ed9eba1(%eax,%r11d),%r12d
|
|
mov 36(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edx,%r11d
|
|
xor 44(%rsp),%eax
|
|
xor %esi,%ebx
|
|
rol $5,%r11d
|
|
xor 4(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
add %r11d,%r12d
|
|
xor 24(%rsp),%eax
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
rol $1,%eax
|
|
mov %eax,36(%rsp)
|
|
lea 0x6ed9eba1(%eax,%ebp),%r11d
|
|
mov 40(%rsp),%eax
|
|
mov %esi,%ebx
|
|
mov %r12d,%ebp
|
|
xor 48(%rsp),%eax
|
|
xor %edx,%ebx
|
|
rol $5,%ebp
|
|
xor 8(%rsp),%eax
|
|
xor %edi,%ebx
|
|
add %ebp,%r11d
|
|
xor 28(%rsp),%eax
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
rol $1,%eax
|
|
mov %eax,40(%rsp)
|
|
lea 0x6ed9eba1(%eax,%edi),%ebp
|
|
mov 44(%rsp),%eax
|
|
mov %edx,%ebx
|
|
mov %r11d,%edi
|
|
xor 52(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
rol $5,%edi
|
|
xor 12(%rsp),%eax
|
|
xor %esi,%ebx
|
|
add %edi,%ebp
|
|
xor 32(%rsp),%eax
|
|
rol $30,%r12d
|
|
add %ebx,%ebp
|
|
rol $1,%eax
|
|
mov %eax,44(%rsp)
|
|
lea 0x6ed9eba1(%eax,%esi),%edi
|
|
mov 48(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %ebp,%esi
|
|
xor 56(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
rol $5,%esi
|
|
xor 16(%rsp),%eax
|
|
xor %edx,%ebx
|
|
add %esi,%edi
|
|
xor 36(%rsp),%eax
|
|
rol $30,%r11d
|
|
add %ebx,%edi
|
|
rol $1,%eax
|
|
mov %eax,48(%rsp)
|
|
lea 0x6ed9eba1(%eax,%edx),%esi
|
|
mov 52(%rsp),%eax
|
|
mov %r11d,%ebx
|
|
mov %edi,%edx
|
|
xor 60(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
rol $5,%edx
|
|
xor 20(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
add %edx,%esi
|
|
xor 40(%rsp),%eax
|
|
rol $30,%ebp
|
|
add %ebx,%esi
|
|
rol $1,%eax
|
|
mov %eax,52(%rsp)
|
|
lea 0x6ed9eba1(%eax,%r12d),%edx
|
|
mov 56(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %esi,%r12d
|
|
xor 0(%rsp),%eax
|
|
xor %edi,%ebx
|
|
rol $5,%r12d
|
|
xor 24(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
add %r12d,%edx
|
|
xor 44(%rsp),%eax
|
|
rol $30,%edi
|
|
add %ebx,%edx
|
|
rol $1,%eax
|
|
mov %eax,56(%rsp)
|
|
lea 0x6ed9eba1(%eax,%r11d),%r12d
|
|
mov 60(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edx,%r11d
|
|
xor 4(%rsp),%eax
|
|
xor %esi,%ebx
|
|
rol $5,%r11d
|
|
xor 28(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
add %r11d,%r12d
|
|
xor 48(%rsp),%eax
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
rol $1,%eax
|
|
mov %eax,60(%rsp)
|
|
lea 0x6ed9eba1(%eax,%ebp),%r11d
|
|
mov 0(%rsp),%eax
|
|
mov %esi,%ebx
|
|
mov %r12d,%ebp
|
|
xor 8(%rsp),%eax
|
|
xor %edx,%ebx
|
|
rol $5,%ebp
|
|
xor 32(%rsp),%eax
|
|
xor %edi,%ebx
|
|
add %ebp,%r11d
|
|
xor 52(%rsp),%eax
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
rol $1,%eax
|
|
mov %eax,0(%rsp)
|
|
lea 0x6ed9eba1(%eax,%edi),%ebp
|
|
mov 4(%rsp),%eax
|
|
mov %edx,%ebx
|
|
mov %r11d,%edi
|
|
xor 12(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
rol $5,%edi
|
|
xor 36(%rsp),%eax
|
|
xor %esi,%ebx
|
|
add %edi,%ebp
|
|
xor 56(%rsp),%eax
|
|
rol $30,%r12d
|
|
add %ebx,%ebp
|
|
rol $1,%eax
|
|
mov %eax,4(%rsp)
|
|
lea 0x6ed9eba1(%eax,%esi),%edi
|
|
mov 8(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %ebp,%esi
|
|
xor 16(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
rol $5,%esi
|
|
xor 40(%rsp),%eax
|
|
xor %edx,%ebx
|
|
add %esi,%edi
|
|
xor 60(%rsp),%eax
|
|
rol $30,%r11d
|
|
add %ebx,%edi
|
|
rol $1,%eax
|
|
mov %eax,8(%rsp)
|
|
lea 0x6ed9eba1(%eax,%edx),%esi
|
|
mov 12(%rsp),%eax
|
|
mov %r11d,%ebx
|
|
mov %edi,%edx
|
|
xor 20(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
rol $5,%edx
|
|
xor 44(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
add %edx,%esi
|
|
xor 0(%rsp),%eax
|
|
rol $30,%ebp
|
|
add %ebx,%esi
|
|
rol $1,%eax
|
|
mov %eax,12(%rsp)
|
|
lea 0x6ed9eba1(%eax,%r12d),%edx
|
|
mov 16(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %esi,%r12d
|
|
xor 24(%rsp),%eax
|
|
xor %edi,%ebx
|
|
rol $5,%r12d
|
|
xor 48(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
add %r12d,%edx
|
|
xor 4(%rsp),%eax
|
|
rol $30,%edi
|
|
add %ebx,%edx
|
|
rol $1,%eax
|
|
mov %eax,16(%rsp)
|
|
lea 0x6ed9eba1(%eax,%r11d),%r12d
|
|
mov 20(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edx,%r11d
|
|
xor 28(%rsp),%eax
|
|
xor %esi,%ebx
|
|
rol $5,%r11d
|
|
xor 52(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
add %r11d,%r12d
|
|
xor 8(%rsp),%eax
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
rol $1,%eax
|
|
mov %eax,20(%rsp)
|
|
lea 0x6ed9eba1(%eax,%ebp),%r11d
|
|
mov 24(%rsp),%eax
|
|
mov %esi,%ebx
|
|
mov %r12d,%ebp
|
|
xor 32(%rsp),%eax
|
|
xor %edx,%ebx
|
|
rol $5,%ebp
|
|
xor 56(%rsp),%eax
|
|
xor %edi,%ebx
|
|
add %ebp,%r11d
|
|
xor 12(%rsp),%eax
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
rol $1,%eax
|
|
mov %eax,24(%rsp)
|
|
lea 0x6ed9eba1(%eax,%edi),%ebp
|
|
mov 28(%rsp),%eax
|
|
mov %edx,%ebx
|
|
mov %r11d,%edi
|
|
xor 36(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
rol $5,%edi
|
|
xor 60(%rsp),%eax
|
|
xor %esi,%ebx
|
|
add %edi,%ebp
|
|
xor 16(%rsp),%eax
|
|
rol $30,%r12d
|
|
add %ebx,%ebp
|
|
rol $1,%eax
|
|
mov %eax,28(%rsp)
|
|
lea 0x6ed9eba1(%eax,%esi),%edi
|
|
mov 32(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %ebp,%esi
|
|
xor 40(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
rol $5,%esi
|
|
xor 0(%rsp),%eax
|
|
xor %edx,%ebx
|
|
add %esi,%edi
|
|
xor 20(%rsp),%eax
|
|
rol $30,%r11d
|
|
add %ebx,%edi
|
|
rol $1,%eax
|
|
mov %eax,32(%rsp)
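	/*
	 * Rounds 40-59: F(b,c,d) = Maj(b,c,d), computed below as
	 * (b & c) | ((b | c) & d); constant 0x8f1bbcdc, written in its
	 * signed form -0x70e44324.
	 */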
lea -0x70e44324(%eax,%edx),%esi
|
|
mov 36(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %ebp,%ecx
|
|
xor 44(%rsp),%eax
|
|
mov %edi,%edx
|
|
and %r11d,%ebx
|
|
xor 4(%rsp),%eax
|
|
or %r11d,%ecx
|
|
rol $5,%edx
|
|
xor 24(%rsp),%eax
|
|
and %r12d,%ecx
|
|
add %edx,%esi
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%ebp
|
|
mov %eax,36(%rsp)
|
|
add %ebx,%esi
|
|
lea -0x70e44324(%eax,%r12d),%edx
|
|
mov 40(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edi,%ecx
|
|
xor 48(%rsp),%eax
|
|
mov %esi,%r12d
|
|
and %ebp,%ebx
|
|
xor 8(%rsp),%eax
|
|
or %ebp,%ecx
|
|
rol $5,%r12d
|
|
xor 28(%rsp),%eax
|
|
and %r11d,%ecx
|
|
add %r12d,%edx
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%edi
|
|
mov %eax,40(%rsp)
|
|
add %ebx,%edx
|
|
lea -0x70e44324(%eax,%r11d),%r12d
|
|
mov 44(%rsp),%eax
|
|
mov %esi,%ebx
|
|
mov %esi,%ecx
|
|
xor 52(%rsp),%eax
|
|
mov %edx,%r11d
|
|
and %edi,%ebx
|
|
xor 12(%rsp),%eax
|
|
or %edi,%ecx
|
|
rol $5,%r11d
|
|
xor 32(%rsp),%eax
|
|
and %ebp,%ecx
|
|
add %r11d,%r12d
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%esi
|
|
mov %eax,44(%rsp)
|
|
add %ebx,%r12d
|
|
lea -0x70e44324(%eax,%ebp),%r11d
|
|
mov 48(%rsp),%eax
|
|
mov %edx,%ebx
|
|
mov %edx,%ecx
|
|
xor 56(%rsp),%eax
|
|
mov %r12d,%ebp
|
|
and %esi,%ebx
|
|
xor 16(%rsp),%eax
|
|
or %esi,%ecx
|
|
rol $5,%ebp
|
|
xor 36(%rsp),%eax
|
|
and %edi,%ecx
|
|
add %ebp,%r11d
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%edx
|
|
mov %eax,48(%rsp)
|
|
add %ebx,%r11d
|
|
lea -0x70e44324(%eax,%edi),%ebp
|
|
mov 52(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %r12d,%ecx
|
|
xor 60(%rsp),%eax
|
|
mov %r11d,%edi
|
|
and %edx,%ebx
|
|
xor 20(%rsp),%eax
|
|
or %edx,%ecx
|
|
rol $5,%edi
|
|
xor 40(%rsp),%eax
|
|
and %esi,%ecx
|
|
add %edi,%ebp
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%r12d
|
|
mov %eax,52(%rsp)
|
|
add %ebx,%ebp
|
|
lea -0x70e44324(%eax,%esi),%edi
|
|
mov 56(%rsp),%eax
|
|
mov %r11d,%ebx
|
|
mov %r11d,%ecx
|
|
xor 0(%rsp),%eax
|
|
mov %ebp,%esi
|
|
and %r12d,%ebx
|
|
xor 24(%rsp),%eax
|
|
or %r12d,%ecx
|
|
rol $5,%esi
|
|
xor 44(%rsp),%eax
|
|
and %edx,%ecx
|
|
add %esi,%edi
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%r11d
|
|
mov %eax,56(%rsp)
|
|
add %ebx,%edi
|
|
lea -0x70e44324(%eax,%edx),%esi
|
|
mov 60(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %ebp,%ecx
|
|
xor 4(%rsp),%eax
|
|
mov %edi,%edx
|
|
and %r11d,%ebx
|
|
xor 28(%rsp),%eax
|
|
or %r11d,%ecx
|
|
rol $5,%edx
|
|
xor 48(%rsp),%eax
|
|
and %r12d,%ecx
|
|
add %edx,%esi
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%ebp
|
|
mov %eax,60(%rsp)
|
|
add %ebx,%esi
|
|
lea -0x70e44324(%eax,%r12d),%edx
|
|
mov 0(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edi,%ecx
|
|
xor 8(%rsp),%eax
|
|
mov %esi,%r12d
|
|
and %ebp,%ebx
|
|
xor 32(%rsp),%eax
|
|
or %ebp,%ecx
|
|
rol $5,%r12d
|
|
xor 52(%rsp),%eax
|
|
and %r11d,%ecx
|
|
add %r12d,%edx
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%edi
|
|
mov %eax,0(%rsp)
|
|
add %ebx,%edx
|
|
lea -0x70e44324(%eax,%r11d),%r12d
|
|
mov 4(%rsp),%eax
|
|
mov %esi,%ebx
|
|
mov %esi,%ecx
|
|
xor 12(%rsp),%eax
|
|
mov %edx,%r11d
|
|
and %edi,%ebx
|
|
xor 36(%rsp),%eax
|
|
or %edi,%ecx
|
|
rol $5,%r11d
|
|
xor 56(%rsp),%eax
|
|
and %ebp,%ecx
|
|
add %r11d,%r12d
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%esi
|
|
mov %eax,4(%rsp)
|
|
add %ebx,%r12d
|
|
lea -0x70e44324(%eax,%ebp),%r11d
|
|
mov 8(%rsp),%eax
|
|
mov %edx,%ebx
|
|
mov %edx,%ecx
|
|
xor 16(%rsp),%eax
|
|
mov %r12d,%ebp
|
|
and %esi,%ebx
|
|
xor 40(%rsp),%eax
|
|
or %esi,%ecx
|
|
rol $5,%ebp
|
|
xor 60(%rsp),%eax
|
|
and %edi,%ecx
|
|
add %ebp,%r11d
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%edx
|
|
mov %eax,8(%rsp)
|
|
add %ebx,%r11d
|
|
lea -0x70e44324(%eax,%edi),%ebp
|
|
mov 12(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %r12d,%ecx
|
|
xor 20(%rsp),%eax
|
|
mov %r11d,%edi
|
|
and %edx,%ebx
|
|
xor 44(%rsp),%eax
|
|
or %edx,%ecx
|
|
rol $5,%edi
|
|
xor 0(%rsp),%eax
|
|
and %esi,%ecx
|
|
add %edi,%ebp
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%r12d
|
|
mov %eax,12(%rsp)
|
|
add %ebx,%ebp
|
|
lea -0x70e44324(%eax,%esi),%edi
|
|
mov 16(%rsp),%eax
|
|
mov %r11d,%ebx
|
|
mov %r11d,%ecx
|
|
xor 24(%rsp),%eax
|
|
mov %ebp,%esi
|
|
and %r12d,%ebx
|
|
xor 48(%rsp),%eax
|
|
or %r12d,%ecx
|
|
rol $5,%esi
|
|
xor 4(%rsp),%eax
|
|
and %edx,%ecx
|
|
add %esi,%edi
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%r11d
|
|
mov %eax,16(%rsp)
|
|
add %ebx,%edi
|
|
lea -0x70e44324(%eax,%edx),%esi
|
|
mov 20(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %ebp,%ecx
|
|
xor 28(%rsp),%eax
|
|
mov %edi,%edx
|
|
and %r11d,%ebx
|
|
xor 52(%rsp),%eax
|
|
or %r11d,%ecx
|
|
rol $5,%edx
|
|
xor 8(%rsp),%eax
|
|
and %r12d,%ecx
|
|
add %edx,%esi
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%ebp
|
|
mov %eax,20(%rsp)
|
|
add %ebx,%esi
|
|
lea -0x70e44324(%eax,%r12d),%edx
|
|
mov 24(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edi,%ecx
|
|
xor 32(%rsp),%eax
|
|
mov %esi,%r12d
|
|
and %ebp,%ebx
|
|
xor 56(%rsp),%eax
|
|
or %ebp,%ecx
|
|
rol $5,%r12d
|
|
xor 12(%rsp),%eax
|
|
and %r11d,%ecx
|
|
add %r12d,%edx
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%edi
|
|
mov %eax,24(%rsp)
|
|
add %ebx,%edx
|
|
lea -0x70e44324(%eax,%r11d),%r12d
|
|
mov 28(%rsp),%eax
|
|
mov %esi,%ebx
|
|
mov %esi,%ecx
|
|
xor 36(%rsp),%eax
|
|
mov %edx,%r11d
|
|
and %edi,%ebx
|
|
xor 60(%rsp),%eax
|
|
or %edi,%ecx
|
|
rol $5,%r11d
|
|
xor 16(%rsp),%eax
|
|
and %ebp,%ecx
|
|
add %r11d,%r12d
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%esi
|
|
mov %eax,28(%rsp)
|
|
add %ebx,%r12d
|
|
lea -0x70e44324(%eax,%ebp),%r11d
|
|
mov 32(%rsp),%eax
|
|
mov %edx,%ebx
|
|
mov %edx,%ecx
|
|
xor 40(%rsp),%eax
|
|
mov %r12d,%ebp
|
|
and %esi,%ebx
|
|
xor 0(%rsp),%eax
|
|
or %esi,%ecx
|
|
rol $5,%ebp
|
|
xor 20(%rsp),%eax
|
|
and %edi,%ecx
|
|
add %ebp,%r11d
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%edx
|
|
mov %eax,32(%rsp)
|
|
add %ebx,%r11d
|
|
lea -0x70e44324(%eax,%edi),%ebp
|
|
mov 36(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %r12d,%ecx
|
|
xor 44(%rsp),%eax
|
|
mov %r11d,%edi
|
|
and %edx,%ebx
|
|
xor 4(%rsp),%eax
|
|
or %edx,%ecx
|
|
rol $5,%edi
|
|
xor 24(%rsp),%eax
|
|
and %esi,%ecx
|
|
add %edi,%ebp
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%r12d
|
|
mov %eax,36(%rsp)
|
|
add %ebx,%ebp
|
|
lea -0x70e44324(%eax,%esi),%edi
|
|
mov 40(%rsp),%eax
|
|
mov %r11d,%ebx
|
|
mov %r11d,%ecx
|
|
xor 48(%rsp),%eax
|
|
mov %ebp,%esi
|
|
and %r12d,%ebx
|
|
xor 8(%rsp),%eax
|
|
or %r12d,%ecx
|
|
rol $5,%esi
|
|
xor 28(%rsp),%eax
|
|
and %edx,%ecx
|
|
add %esi,%edi
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%r11d
|
|
mov %eax,40(%rsp)
|
|
add %ebx,%edi
|
|
lea -0x70e44324(%eax,%edx),%esi
|
|
mov 44(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %ebp,%ecx
|
|
xor 52(%rsp),%eax
|
|
mov %edi,%edx
|
|
and %r11d,%ebx
|
|
xor 12(%rsp),%eax
|
|
or %r11d,%ecx
|
|
rol $5,%edx
|
|
xor 32(%rsp),%eax
|
|
and %r12d,%ecx
|
|
add %edx,%esi
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%ebp
|
|
mov %eax,44(%rsp)
|
|
add %ebx,%esi
|
|
lea -0x70e44324(%eax,%r12d),%edx
|
|
mov 48(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edi,%ecx
|
|
xor 56(%rsp),%eax
|
|
mov %esi,%r12d
|
|
and %ebp,%ebx
|
|
xor 16(%rsp),%eax
|
|
or %ebp,%ecx
|
|
rol $5,%r12d
|
|
xor 36(%rsp),%eax
|
|
and %r11d,%ecx
|
|
add %r12d,%edx
|
|
rol $1,%eax
|
|
or %ecx,%ebx
|
|
rol $30,%edi
|
|
mov %eax,48(%rsp)
|
|
add %ebx,%edx
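	/*
	 * Rounds 60-79: F(b,c,d) = b ^ c ^ d again; constant 0xca62c1d6,
	 * written in its signed form -0x359d3e2a.
	 */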
lea -0x359d3e2a(%eax,%r11d),%r12d
|
|
mov 52(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edx,%r11d
|
|
xor 60(%rsp),%eax
|
|
xor %esi,%ebx
|
|
rol $5,%r11d
|
|
xor 20(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
add %r11d,%r12d
|
|
xor 40(%rsp),%eax
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
rol $1,%eax
|
|
mov %eax,52(%rsp)
|
|
lea -0x359d3e2a(%eax,%ebp),%r11d
|
|
mov 56(%rsp),%eax
|
|
mov %esi,%ebx
|
|
mov %r12d,%ebp
|
|
xor 0(%rsp),%eax
|
|
xor %edx,%ebx
|
|
rol $5,%ebp
|
|
xor 24(%rsp),%eax
|
|
xor %edi,%ebx
|
|
add %ebp,%r11d
|
|
xor 44(%rsp),%eax
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
rol $1,%eax
|
|
mov %eax,56(%rsp)
|
|
lea -0x359d3e2a(%eax,%edi),%ebp
|
|
mov 60(%rsp),%eax
|
|
mov %edx,%ebx
|
|
mov %r11d,%edi
|
|
xor 4(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
rol $5,%edi
|
|
xor 28(%rsp),%eax
|
|
xor %esi,%ebx
|
|
add %edi,%ebp
|
|
xor 48(%rsp),%eax
|
|
rol $30,%r12d
|
|
add %ebx,%ebp
|
|
rol $1,%eax
|
|
mov %eax,60(%rsp)
|
|
lea -0x359d3e2a(%eax,%esi),%edi
|
|
mov 0(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %ebp,%esi
|
|
xor 8(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
rol $5,%esi
|
|
xor 32(%rsp),%eax
|
|
xor %edx,%ebx
|
|
add %esi,%edi
|
|
xor 52(%rsp),%eax
|
|
rol $30,%r11d
|
|
add %ebx,%edi
|
|
rol $1,%eax
|
|
mov %eax,0(%rsp)
|
|
lea -0x359d3e2a(%eax,%edx),%esi
|
|
mov 4(%rsp),%eax
|
|
mov %r11d,%ebx
|
|
mov %edi,%edx
|
|
xor 12(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
rol $5,%edx
|
|
xor 36(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
add %edx,%esi
|
|
xor 56(%rsp),%eax
|
|
rol $30,%ebp
|
|
add %ebx,%esi
|
|
rol $1,%eax
|
|
mov %eax,4(%rsp)
|
|
lea -0x359d3e2a(%eax,%r12d),%edx
|
|
mov 8(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %esi,%r12d
|
|
xor 16(%rsp),%eax
|
|
xor %edi,%ebx
|
|
rol $5,%r12d
|
|
xor 40(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
add %r12d,%edx
|
|
xor 60(%rsp),%eax
|
|
rol $30,%edi
|
|
add %ebx,%edx
|
|
rol $1,%eax
|
|
mov %eax,8(%rsp)
|
|
lea -0x359d3e2a(%eax,%r11d),%r12d
|
|
mov 12(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edx,%r11d
|
|
xor 20(%rsp),%eax
|
|
xor %esi,%ebx
|
|
rol $5,%r11d
|
|
xor 44(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
add %r11d,%r12d
|
|
xor 0(%rsp),%eax
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
rol $1,%eax
|
|
mov %eax,12(%rsp)
|
|
lea -0x359d3e2a(%eax,%ebp),%r11d
|
|
mov 16(%rsp),%eax
|
|
mov %esi,%ebx
|
|
mov %r12d,%ebp
|
|
xor 24(%rsp),%eax
|
|
xor %edx,%ebx
|
|
rol $5,%ebp
|
|
xor 48(%rsp),%eax
|
|
xor %edi,%ebx
|
|
add %ebp,%r11d
|
|
xor 4(%rsp),%eax
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
rol $1,%eax
|
|
mov %eax,16(%rsp)
|
|
lea -0x359d3e2a(%eax,%edi),%ebp
|
|
mov 20(%rsp),%eax
|
|
mov %edx,%ebx
|
|
mov %r11d,%edi
|
|
xor 28(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
rol $5,%edi
|
|
xor 52(%rsp),%eax
|
|
xor %esi,%ebx
|
|
add %edi,%ebp
|
|
xor 8(%rsp),%eax
|
|
rol $30,%r12d
|
|
add %ebx,%ebp
|
|
rol $1,%eax
|
|
mov %eax,20(%rsp)
|
|
lea -0x359d3e2a(%eax,%esi),%edi
|
|
mov 24(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %ebp,%esi
|
|
xor 32(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
rol $5,%esi
|
|
xor 56(%rsp),%eax
|
|
xor %edx,%ebx
|
|
add %esi,%edi
|
|
xor 12(%rsp),%eax
|
|
rol $30,%r11d
|
|
add %ebx,%edi
|
|
rol $1,%eax
|
|
mov %eax,24(%rsp)
|
|
lea -0x359d3e2a(%eax,%edx),%esi
|
|
mov 28(%rsp),%eax
|
|
mov %r11d,%ebx
|
|
mov %edi,%edx
|
|
xor 36(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
rol $5,%edx
|
|
xor 60(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
add %edx,%esi
|
|
xor 16(%rsp),%eax
|
|
rol $30,%ebp
|
|
add %ebx,%esi
|
|
rol $1,%eax
|
|
mov %eax,28(%rsp)
|
|
lea -0x359d3e2a(%eax,%r12d),%edx
|
|
mov 32(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %esi,%r12d
|
|
xor 40(%rsp),%eax
|
|
xor %edi,%ebx
|
|
rol $5,%r12d
|
|
xor 0(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
add %r12d,%edx
|
|
xor 20(%rsp),%eax
|
|
rol $30,%edi
|
|
add %ebx,%edx
|
|
rol $1,%eax
|
|
mov %eax,32(%rsp)
|
|
lea -0x359d3e2a(%eax,%r11d),%r12d
|
|
mov 36(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edx,%r11d
|
|
xor 44(%rsp),%eax
|
|
xor %esi,%ebx
|
|
rol $5,%r11d
|
|
xor 4(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
add %r11d,%r12d
|
|
xor 24(%rsp),%eax
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
rol $1,%eax
|
|
mov %eax,36(%rsp)
|
|
lea -0x359d3e2a(%eax,%ebp),%r11d
|
|
mov 40(%rsp),%eax
|
|
mov %esi,%ebx
|
|
mov %r12d,%ebp
|
|
xor 48(%rsp),%eax
|
|
xor %edx,%ebx
|
|
rol $5,%ebp
|
|
xor 8(%rsp),%eax
|
|
xor %edi,%ebx
|
|
add %ebp,%r11d
|
|
xor 28(%rsp),%eax
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
rol $1,%eax
|
|
mov %eax,40(%rsp)
|
|
lea -0x359d3e2a(%eax,%edi),%ebp
|
|
mov 44(%rsp),%eax
|
|
mov %edx,%ebx
|
|
mov %r11d,%edi
|
|
xor 52(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
rol $5,%edi
|
|
xor 12(%rsp),%eax
|
|
xor %esi,%ebx
|
|
add %edi,%ebp
|
|
xor 32(%rsp),%eax
|
|
rol $30,%r12d
|
|
add %ebx,%ebp
|
|
rol $1,%eax
|
|
mov %eax,44(%rsp)
|
|
lea -0x359d3e2a(%eax,%esi),%edi
|
|
mov 48(%rsp),%eax
|
|
mov %r12d,%ebx
|
|
mov %ebp,%esi
|
|
xor 56(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
rol $5,%esi
|
|
xor 16(%rsp),%eax
|
|
xor %edx,%ebx
|
|
add %esi,%edi
|
|
xor 36(%rsp),%eax
|
|
rol $30,%r11d
|
|
add %ebx,%edi
|
|
rol $1,%eax
|
|
mov %eax,48(%rsp)
|
|
lea -0x359d3e2a(%eax,%edx),%esi
|
|
mov 52(%rsp),%eax
|
|
mov %r11d,%ebx
|
|
mov %edi,%edx
|
|
xor 60(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
rol $5,%edx
|
|
xor 20(%rsp),%eax
|
|
xor %r12d,%ebx
|
|
add %edx,%esi
|
|
xor 40(%rsp),%eax
|
|
rol $30,%ebp
|
|
add %ebx,%esi
|
|
rol $1,%eax
|
|
lea -0x359d3e2a(%eax,%r12d),%edx
|
|
mov 56(%rsp),%eax
|
|
mov %ebp,%ebx
|
|
mov %esi,%r12d
|
|
xor 0(%rsp),%eax
|
|
xor %edi,%ebx
|
|
rol $5,%r12d
|
|
xor 24(%rsp),%eax
|
|
xor %r11d,%ebx
|
|
add %r12d,%edx
|
|
xor 44(%rsp),%eax
|
|
rol $30,%edi
|
|
add %ebx,%edx
|
|
rol $1,%eax
|
|
lea -0x359d3e2a(%eax,%r11d),%r12d
|
|
mov 60(%rsp),%eax
|
|
mov %edi,%ebx
|
|
mov %edx,%r11d
|
|
xor 4(%rsp),%eax
|
|
xor %esi,%ebx
|
|
rol $5,%r11d
|
|
xor 28(%rsp),%eax
|
|
xor %ebp,%ebx
|
|
add %r11d,%r12d
|
|
xor 48(%rsp),%eax
|
|
rol $30,%esi
|
|
add %ebx,%r12d
|
|
rol $1,%eax
|
|
lea -0x359d3e2a(%eax,%ebp),%r11d
|
|
mov %esi,%ebx
|
|
mov %r12d,%ebp
|
|
xor %edx,%ebx
|
|
rol $5,%ebp
|
|
xor %edi,%ebx
|
|
add %ebp,%r11d
|
|
rol $30,%edx
|
|
add %ebx,%r11d
|
|
	// Update and save state information in SHA-1 context
	add	0(%r8),%r11d
	add	4(%r8),%r12d
	add	8(%r8),%edx
	add	12(%r8),%esi
	add	16(%r8),%edi
	mov	%r11d,0(%r8)
	mov	%r12d,4(%r8)
	mov	%edx,8(%r8)
	mov	%esi,12(%r8)
	mov	%edi,16(%r8)

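	/*
	 * The updated chaining values now live in %r11d, %r12d, %edx,
	 * %esi and %edi; the xchg sequence permutes them back into the
	 * %edx, %esi, %edi, %ebp, %r11d assignment expected at the top
	 * of .Lloop, as spelled out by the trailing "mov" comments.
	 */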
	xchg	%r11d,%edx	# mov %r11d,%edx
	xchg	%r12d,%esi	# mov %r12d,%esi
	xchg	%r11d,%edi	# mov %edx,%edi
	xchg	%r12d,%ebp	# mov %esi,%ebp
				# mov %edi,%r11d
	lea	64(%r9),%r9
	sub	$1,%r10
	jnz	.Lloop
	mov	64(%rsp),%rsp
	pop	%r12
	pop	%rbp
	pop	%rbx
	ret
SET_SIZE(sha1_block_data_order)
.asciz	"SHA1 block transform for x86_64, CRYPTOGAMS by <appro@openssl.org>"

#endif	/* lint || __lint */

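/*
 * Without an explicit note, GNU binutils marks objects assembled from
 * .S files as requiring an executable stack, which hardened systems
 * reject; the empty .note.GNU-stack section below marks the stack as
 * non-executable.
 */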
#ifdef __ELF__
.section .note.GNU-stack,"",%progbits
#endif