Source-Changes-HG archive


[src/trunk]: src/crypto/external/bsd/openssl/lib/libcrypto/arch/i386 new scri...



details:   https://anonhg.NetBSD.org/src/rev/290f05593b27
branches:  trunk
changeset: 829725:290f05593b27
user:      christos <christos%NetBSD.org@localhost>
date:      Fri Feb 09 13:25:41 2018 +0000

description:
the new perlasm script does not understand -D options; drop -DOPENSSL_IA32_SSE2 from the Makefile recipe and regenerate the i386 assembly files
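
For reference, a minimal sketch of regenerating a single file by hand with
the updated recipe (see the Makefile hunk in the diff below).  The OPENSSLSRC
value and the location of aes-586.pl inside the OpenSSL tree are assumptions,
and the extra sed/tr step that wraps OPENSSL_cpuid_setup in
PIC_PROLOGUE/PIC_EPILOGUE (only needed for x86cpuid.pl) is omitted:

    # sketch only, not the committed Makefile target
    OPENSSLSRC=/usr/src/crypto/external/bsd/openssl/dist   # assumed location

    # run the perlasm driver without any -D defines, then replace the
    # .file directive with the NetBSD asm.h include, as the recipe does
    perl -I$OPENSSLSRC/crypto/perlasm \
         -I$OPENSSLSRC/crypto/bn/asm \
         $OPENSSLSRC/crypto/aes/asm/aes-586.pl elf -fPIC \
      | sed -e 's,^\.file.*$,#include <machine/asm.h>,' \
      > aes-586.S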

diffstat:

 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/Makefile     |     4 +-
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/aes-586.S    |  3244 +++++++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/aesni-x86.S  |  2448 +++++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/bf-586.S     |   896 ++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/bn-586.S     |  1384 ++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/cast-586.S   |   933 ++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/cmll-x86.S   |  2375 +++++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/co-586.S     |  1254 +++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/crypt586.S   |   879 ++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/des-586.S    |  1838 +++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/ghash-x86.S  |   694 ++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/md5-586.S    |   679 ++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/rc4-586.S    |   381 +
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/rc5-586.S    |   564 +
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/rmd-586.S    |  1965 +++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/sha1-586.S   |  1380 ++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/sha256-586.S |  3347 ++++++++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/sha512-586.S |   565 +
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/uplink-x86.S |   271 +
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/vpaes-x86.S  |   661 +
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/x86cpuid.S   |   362 +
 21 files changed, 26122 insertions(+), 2 deletions(-)

diffs (truncated from 26218 to 300 lines):

diff -r 8f0f342ca63e -r 290f05593b27 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/Makefile
--- a/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/Makefile      Fri Feb 09 09:36:42 2018 +0000
+++ b/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/Makefile      Fri Feb 09 13:25:41 2018 +0000
@@ -1,4 +1,4 @@
-#      $NetBSD: Makefile,v 1.7 2015/05/16 17:32:54 joerg Exp $
+#      $NetBSD: Makefile,v 1.8 2018/02/09 13:25:41 christos Exp $
 
 .include "bsd.own.mk"
 
@@ -9,7 +9,7 @@
        for i in $$(find ${OPENSSLSRC} -name \*86.pl) \
                  ${OPENSSLSRC}/crypto/x86cpuid.pl; do \
                perl -I${OPENSSLSRC}/crypto/perlasm \
-               -I${OPENSSLSRC}/crypto/bn/asm $$i elf -fPIC -DOPENSSL_IA32_SSE2 \
+               -I${OPENSSLSRC}/crypto/bn/asm $$i elf -fPIC \
                | sed -e 's,^\.file.*$$,#include <machine/asm.h>,' \
                        -e 's/  call    OPENSSL_cpuid_setup/    PIC_PROLOGUE!   call    PIC_PLT(OPENSSL_cpuid_setup)!   PIC_EPILOGUE/' | tr '!' '\n' \
                > $$(basename $$i .pl).S; \
diff -r 8f0f342ca63e -r 290f05593b27 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/aes-586.S
--- a/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/aes-586.S     Fri Feb 09 09:36:42 2018 +0000
+++ b/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/aes-586.S     Fri Feb 09 13:25:41 2018 +0000
@@ -0,0 +1,3244 @@
+#include <machine/asm.h>
+.text
+.type  _x86_AES_encrypt_compact,@function
+.align 16
+_x86_AES_encrypt_compact:
+       movl    %edi,20(%esp)
+       xorl    (%edi),%eax
+       xorl    4(%edi),%ebx
+       xorl    8(%edi),%ecx
+       xorl    12(%edi),%edx
+       movl    240(%edi),%esi
+       leal    -2(%esi,%esi,1),%esi
+       leal    (%edi,%esi,8),%esi
+       movl    %esi,24(%esp)
+       movl    -128(%ebp),%edi
+       movl    -96(%ebp),%esi
+       movl    -64(%ebp),%edi
+       movl    -32(%ebp),%esi
+       movl    (%ebp),%edi
+       movl    32(%ebp),%esi
+       movl    64(%ebp),%edi
+       movl    96(%ebp),%esi
+.align 16
+.L000loop:
+       movl    %eax,%esi
+       andl    $255,%esi
+       movzbl  -128(%ebp,%esi,1),%esi
+       movzbl  %bh,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $8,%edi
+       xorl    %edi,%esi
+       movl    %ecx,%edi
+       shrl    $16,%edi
+       andl    $255,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $16,%edi
+       xorl    %edi,%esi
+       movl    %edx,%edi
+       shrl    $24,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $24,%edi
+       xorl    %edi,%esi
+       movl    %esi,4(%esp)
+
+       movl    %ebx,%esi
+       andl    $255,%esi
+       shrl    $16,%ebx
+       movzbl  -128(%ebp,%esi,1),%esi
+       movzbl  %ch,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $8,%edi
+       xorl    %edi,%esi
+       movl    %edx,%edi
+       shrl    $16,%edi
+       andl    $255,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $16,%edi
+       xorl    %edi,%esi
+       movl    %eax,%edi
+       shrl    $24,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $24,%edi
+       xorl    %edi,%esi
+       movl    %esi,8(%esp)
+
+       movl    %ecx,%esi
+       andl    $255,%esi
+       shrl    $24,%ecx
+       movzbl  -128(%ebp,%esi,1),%esi
+       movzbl  %dh,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $8,%edi
+       xorl    %edi,%esi
+       movl    %eax,%edi
+       shrl    $16,%edi
+       andl    $255,%edx
+       andl    $255,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $16,%edi
+       xorl    %edi,%esi
+       movzbl  %bh,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $24,%edi
+       xorl    %edi,%esi
+
+       andl    $255,%edx
+       movzbl  -128(%ebp,%edx,1),%edx
+       movzbl  %ah,%eax
+       movzbl  -128(%ebp,%eax,1),%eax
+       shll    $8,%eax
+       xorl    %eax,%edx
+       movl    4(%esp),%eax
+       andl    $255,%ebx
+       movzbl  -128(%ebp,%ebx,1),%ebx
+       shll    $16,%ebx
+       xorl    %ebx,%edx
+       movl    8(%esp),%ebx
+       movzbl  -128(%ebp,%ecx,1),%ecx
+       shll    $24,%ecx
+       xorl    %ecx,%edx
+       movl    %esi,%ecx
+
+       movl    $2155905152,%ebp
+       andl    %ecx,%ebp
+       leal    (%ecx,%ecx,1),%edi
+       movl    %ebp,%esi
+       shrl    $7,%ebp
+       andl    $4278124286,%edi
+       subl    %ebp,%esi
+       movl    %ecx,%ebp
+       andl    $454761243,%esi
+       rorl    $16,%ebp
+       xorl    %edi,%esi
+       movl    %ecx,%edi
+       xorl    %esi,%ecx
+       rorl    $24,%edi
+       xorl    %ebp,%esi
+       roll    $24,%ecx
+       xorl    %edi,%esi
+       movl    $2155905152,%ebp
+       xorl    %esi,%ecx
+       andl    %edx,%ebp
+       leal    (%edx,%edx,1),%edi
+       movl    %ebp,%esi
+       shrl    $7,%ebp
+       andl    $4278124286,%edi
+       subl    %ebp,%esi
+       movl    %edx,%ebp
+       andl    $454761243,%esi
+       rorl    $16,%ebp
+       xorl    %edi,%esi
+       movl    %edx,%edi
+       xorl    %esi,%edx
+       rorl    $24,%edi
+       xorl    %ebp,%esi
+       roll    $24,%edx
+       xorl    %edi,%esi
+       movl    $2155905152,%ebp
+       xorl    %esi,%edx
+       andl    %eax,%ebp
+       leal    (%eax,%eax,1),%edi
+       movl    %ebp,%esi
+       shrl    $7,%ebp
+       andl    $4278124286,%edi
+       subl    %ebp,%esi
+       movl    %eax,%ebp
+       andl    $454761243,%esi
+       rorl    $16,%ebp
+       xorl    %edi,%esi
+       movl    %eax,%edi
+       xorl    %esi,%eax
+       rorl    $24,%edi
+       xorl    %ebp,%esi
+       roll    $24,%eax
+       xorl    %edi,%esi
+       movl    $2155905152,%ebp
+       xorl    %esi,%eax
+       andl    %ebx,%ebp
+       leal    (%ebx,%ebx,1),%edi
+       movl    %ebp,%esi
+       shrl    $7,%ebp
+       andl    $4278124286,%edi
+       subl    %ebp,%esi
+       movl    %ebx,%ebp
+       andl    $454761243,%esi
+       rorl    $16,%ebp
+       xorl    %edi,%esi
+       movl    %ebx,%edi
+       xorl    %esi,%ebx
+       rorl    $24,%edi
+       xorl    %ebp,%esi
+       roll    $24,%ebx
+       xorl    %edi,%esi
+       xorl    %esi,%ebx
+       movl    20(%esp),%edi
+       movl    28(%esp),%ebp
+       addl    $16,%edi
+       xorl    (%edi),%eax
+       xorl    4(%edi),%ebx
+       xorl    8(%edi),%ecx
+       xorl    12(%edi),%edx
+       cmpl    24(%esp),%edi
+       movl    %edi,20(%esp)
+       jb      .L000loop
+       movl    %eax,%esi
+       andl    $255,%esi
+       movzbl  -128(%ebp,%esi,1),%esi
+       movzbl  %bh,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $8,%edi
+       xorl    %edi,%esi
+       movl    %ecx,%edi
+       shrl    $16,%edi
+       andl    $255,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $16,%edi
+       xorl    %edi,%esi
+       movl    %edx,%edi
+       shrl    $24,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $24,%edi
+       xorl    %edi,%esi
+       movl    %esi,4(%esp)
+
+       movl    %ebx,%esi
+       andl    $255,%esi
+       shrl    $16,%ebx
+       movzbl  -128(%ebp,%esi,1),%esi
+       movzbl  %ch,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $8,%edi
+       xorl    %edi,%esi
+       movl    %edx,%edi
+       shrl    $16,%edi
+       andl    $255,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $16,%edi
+       xorl    %edi,%esi
+       movl    %eax,%edi
+       shrl    $24,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $24,%edi
+       xorl    %edi,%esi
+       movl    %esi,8(%esp)
+
+       movl    %ecx,%esi
+       andl    $255,%esi
+       shrl    $24,%ecx
+       movzbl  -128(%ebp,%esi,1),%esi
+       movzbl  %dh,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $8,%edi
+       xorl    %edi,%esi
+       movl    %eax,%edi
+       shrl    $16,%edi
+       andl    $255,%edx
+       andl    $255,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $16,%edi
+       xorl    %edi,%esi
+       movzbl  %bh,%edi
+       movzbl  -128(%ebp,%edi,1),%edi
+       shll    $24,%edi
+       xorl    %edi,%esi
+
+       movl    20(%esp),%edi
+       andl    $255,%edx
+       movzbl  -128(%ebp,%edx,1),%edx
+       movzbl  %ah,%eax
+       movzbl  -128(%ebp,%eax,1),%eax
+       shll    $8,%eax
+       xorl    %eax,%edx
+       movl    4(%esp),%eax
+       andl    $255,%ebx
+       movzbl  -128(%ebp,%ebx,1),%ebx
+       shll    $16,%ebx
+       xorl    %ebx,%edx
+       movl    8(%esp),%ebx
+       movzbl  -128(%ebp,%ecx,1),%ecx
+       shll    $24,%ecx
+       xorl    %ecx,%edx
+       movl    %esi,%ecx
+
+       xorl    16(%edi),%eax
+       xorl    20(%edi),%ebx
+       xorl    24(%edi),%ecx
+       xorl    28(%edi),%edx
+       ret
+.size  _x86_AES_encrypt_compact,.-_x86_AES_encrypt_compact
+.type  _sse_AES_encrypt_compact,@function
+.align 16
+_sse_AES_encrypt_compact:
+       pxor    (%edi),%mm0
+       pxor    8(%edi),%mm4
+       movl    240(%edi),%esi
+       leal    -2(%esi,%esi,1),%esi
+       leal    (%edi,%esi,8),%esi
+       movl    %esi,24(%esp)


