Source-Changes-HG archive


[src/trunk]: src/crypto/external/bsd/openssl/lib/libcrypto/arch re-gen of the...



details:   https://anonhg.NetBSD.org/src/rev/ec28fdad6a64
branches:  trunk
changeset: 765787:ec28fdad6a64
user:      spz <spz%NetBSD.org@localhost>
date:      Mon Jun 06 06:08:52 2011 +0000

description:
re-generating the assembler files created these new files
(the files that merely changed were committed previously)
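
These .S files are emitted by OpenSSL's perlasm scripts under
dist/crypto/*/asm (the .file directive in the diff below records the
generating source path).  For orientation, the entry points in the i386
AES-NI file correspond to roughly the following C prototypes.  This is a
hedged sketch inferred from the stack offsets the assembly reads,
assuming OpenSSL's <openssl/aes.h> for the AES_KEY type, whose round
count sits at byte offset 240:

    /* Hedged sketch: prototypes inferred from the assembly below. */
    #include <stddef.h>
    #include <openssl/aes.h>

    /* one 16-byte block: 4(%esp)=in, 8(%esp)=out, 12(%esp)=key */
    void aesni_encrypt(const unsigned char *in, unsigned char *out,
                       const AES_KEY *key);
    void aesni_decrypt(const unsigned char *in, unsigned char *out,
                       const AES_KEY *key);

    /* ECB over whole blocks; a zero enc flag selects the decrypt path */
    void aesni_ecb_encrypt(const unsigned char *in, unsigned char *out,
                           size_t length, const AES_KEY *key, int enc);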

diffstat:

 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/aesni-x86.S        |   761 +++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/cmll-x86.S         |  2375 ++++++++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/uplink-x86.S       |   271 +
 crypto/external/bsd/openssl/lib/libcrypto/arch/sparc64/sha1-sparcv9a.S |  1634 ++++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/cmll-x86_64.S    |  1838 +++++++
 crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/uplink-x86_64.S  |   497 ++
 6 files changed, 7376 insertions(+), 0 deletions(-)

diffs (truncated from 7400 to 300 lines):

diff -r 1dcbc3726b99 -r ec28fdad6a64 crypto/external/bsd/openssl/lib/libcrypto/arch/i386/aesni-x86.S
--- /dev/null   Thu Jan 01 00:00:00 1970 +0000
+++ b/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/aesni-x86.S   Mon Jun 06 06:08:52 2011 +0000
@@ -0,0 +1,761 @@
+.file  "/home/spz/cvs/src/crypto/external/bsd/openssl/dist/crypto/aes/asm/aesni-x86.s"
+.text
+.globl aesni_encrypt
+.type  aesni_encrypt,@function
+.align 16
+aesni_encrypt:
+.L_aesni_encrypt_begin:
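+       # args: 4(%esp)=in, 8(%esp)=out, 12(%esp)=key schedule
+       # (the round count is read from offset 240 of the schedule)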
+       movl    4(%esp),%eax
+       movl    12(%esp),%edx
+       movups  (%eax),%xmm0
+       movl    240(%edx),%ecx
+       movl    8(%esp),%eax
+       movups  (%edx),%xmm3
+       movups  16(%edx),%xmm4
+       leal    32(%edx),%edx
+       pxor    %xmm3,%xmm0
+.L000enc1_loop:
+       aesenc  %xmm4,%xmm0
+       decl    %ecx
+       movups  (%edx),%xmm4
+       leal    16(%edx),%edx
+       jnz     .L000enc1_loop
+       aesenclast      %xmm4,%xmm0
+       movups  %xmm0,(%eax)
+       ret
+.size  aesni_encrypt,.-.L_aesni_encrypt_begin
+.globl aesni_decrypt
+.type  aesni_decrypt,@function
+.align 16
+aesni_decrypt:
+.L_aesni_decrypt_begin:
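+       # same argument layout as aesni_encrypt, using aesdec rounds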
+       movl    4(%esp),%eax
+       movl    12(%esp),%edx
+       movups  (%eax),%xmm0
+       movl    240(%edx),%ecx
+       movl    8(%esp),%eax
+       movups  (%edx),%xmm3
+       movups  16(%edx),%xmm4
+       leal    32(%edx),%edx
+       pxor    %xmm3,%xmm0
+.L001dec1_loop:
+       aesdec  %xmm4,%xmm0
+       decl    %ecx
+       movups  (%edx),%xmm4
+       leal    16(%edx),%edx
+       jnz     .L001dec1_loop
+       aesdeclast      %xmm4,%xmm0
+       movups  %xmm0,(%eax)
+       ret
+.size  aesni_decrypt,.-.L_aesni_decrypt_begin
+.type  _aesni_encrypt3,@function
+.align 16
+_aesni_encrypt3:
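+       # encrypt three blocks (%xmm0-%xmm2) in parallel; %ecx holds the
+       # round count, halved since each loop pass applies two rounds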
+       movups  (%edx),%xmm3
+       shrl    $1,%ecx
+       movups  16(%edx),%xmm4
+       leal    32(%edx),%edx
+       pxor    %xmm3,%xmm0
+       pxor    %xmm3,%xmm1
+       pxor    %xmm3,%xmm2
+       jmp     .L002enc3_loop
+.align 16
+.L002enc3_loop:
+       aesenc  %xmm4,%xmm0
+       movups  (%edx),%xmm3
+       aesenc  %xmm4,%xmm1
+       decl    %ecx
+       aesenc  %xmm4,%xmm2
+       movups  16(%edx),%xmm4
+       aesenc  %xmm3,%xmm0
+       leal    32(%edx),%edx
+       aesenc  %xmm3,%xmm1
+       aesenc  %xmm3,%xmm2
+       jnz     .L002enc3_loop
+       aesenc  %xmm4,%xmm0
+       movups  (%edx),%xmm3
+       aesenc  %xmm4,%xmm1
+       aesenc  %xmm4,%xmm2
+       aesenclast      %xmm3,%xmm0
+       aesenclast      %xmm3,%xmm1
+       aesenclast      %xmm3,%xmm2
+       ret
+.size  _aesni_encrypt3,.-_aesni_encrypt3
+.type  _aesni_decrypt3,@function
+.align 16
+_aesni_decrypt3:
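+       # decrypt three blocks (%xmm0-%xmm2), mirroring _aesni_encrypt3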
+       movups  (%edx),%xmm3
+       shrl    $1,%ecx
+       movups  16(%edx),%xmm4
+       leal    32(%edx),%edx
+       pxor    %xmm3,%xmm0
+       pxor    %xmm3,%xmm1
+       pxor    %xmm3,%xmm2
+       jmp     .L003dec3_loop
+.align 16
+.L003dec3_loop:
+       aesdec  %xmm4,%xmm0
+       movups  (%edx),%xmm3
+       aesdec  %xmm4,%xmm1
+       decl    %ecx
+       aesdec  %xmm4,%xmm2
+       movups  16(%edx),%xmm4
+       aesdec  %xmm3,%xmm0
+       leal    32(%edx),%edx
+       aesdec  %xmm3,%xmm1
+       aesdec  %xmm3,%xmm2
+       jnz     .L003dec3_loop
+       aesdec  %xmm4,%xmm0
+       movups  (%edx),%xmm3
+       aesdec  %xmm4,%xmm1
+       aesdec  %xmm4,%xmm2
+       aesdeclast      %xmm3,%xmm0
+       aesdeclast      %xmm3,%xmm1
+       aesdeclast      %xmm3,%xmm2
+       ret
+.size  _aesni_decrypt3,.-_aesni_decrypt3
+.type  _aesni_encrypt4,@function
+.align 16
+_aesni_encrypt4:
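+       # encrypt four blocks (%xmm0-%xmm2 and %xmm7) in parallel,
+       # two rounds per loop pass as in _aesni_encrypt3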
+       movups  (%edx),%xmm3
+       movups  16(%edx),%xmm4
+       shrl    $1,%ecx
+       leal    32(%edx),%edx
+       pxor    %xmm3,%xmm0
+       pxor    %xmm3,%xmm1
+       pxor    %xmm3,%xmm2
+       pxor    %xmm3,%xmm7
+       jmp     .L004enc3_loop
+.align 16
+.L004enc3_loop:
+       aesenc  %xmm4,%xmm0
+       movups  (%edx),%xmm3
+       aesenc  %xmm4,%xmm1
+       decl    %ecx
+       aesenc  %xmm4,%xmm2
+       aesenc  %xmm4,%xmm7
+       movups  16(%edx),%xmm4
+       aesenc  %xmm3,%xmm0
+       leal    32(%edx),%edx
+       aesenc  %xmm3,%xmm1
+       aesenc  %xmm3,%xmm2
+       aesenc  %xmm3,%xmm7
+       jnz     .L004enc3_loop
+       aesenc  %xmm4,%xmm0
+       movups  (%edx),%xmm3
+       aesenc  %xmm4,%xmm1
+       aesenc  %xmm4,%xmm2
+       aesenc  %xmm4,%xmm7
+       aesenclast      %xmm3,%xmm0
+       aesenclast      %xmm3,%xmm1
+       aesenclast      %xmm3,%xmm2
+       aesenclast      %xmm3,%xmm7
+       ret
+.size  _aesni_encrypt4,.-_aesni_encrypt4
+.type  _aesni_decrypt4,@function
+.align 16
+_aesni_decrypt4:
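+       # decrypt four blocks (%xmm0-%xmm2 and %xmm7), mirroring _aesni_encrypt4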
+       movups  (%edx),%xmm3
+       movups  16(%edx),%xmm4
+       shrl    $1,%ecx
+       leal    32(%edx),%edx
+       pxor    %xmm3,%xmm0
+       pxor    %xmm3,%xmm1
+       pxor    %xmm3,%xmm2
+       pxor    %xmm3,%xmm7
+       jmp     .L005dec3_loop
+.align 16
+.L005dec3_loop:
+       aesdec  %xmm4,%xmm0
+       movups  (%edx),%xmm3
+       aesdec  %xmm4,%xmm1
+       decl    %ecx
+       aesdec  %xmm4,%xmm2
+       aesdec  %xmm4,%xmm7
+       movups  16(%edx),%xmm4
+       aesdec  %xmm3,%xmm0
+       leal    32(%edx),%edx
+       aesdec  %xmm3,%xmm1
+       aesdec  %xmm3,%xmm2
+       aesdec  %xmm3,%xmm7
+       jnz     .L005dec3_loop
+       aesdec  %xmm4,%xmm0
+       movups  (%edx),%xmm3
+       aesdec  %xmm4,%xmm1
+       aesdec  %xmm4,%xmm2
+       aesdec  %xmm4,%xmm7
+       aesdeclast      %xmm3,%xmm0
+       aesdeclast      %xmm3,%xmm1
+       aesdeclast      %xmm3,%xmm2
+       aesdeclast      %xmm3,%xmm7
+       ret
+.size  _aesni_decrypt4,.-_aesni_decrypt4
+.globl aesni_ecb_encrypt
+.type  aesni_ecb_encrypt,@function
+.align 16
+aesni_ecb_encrypt:
+.L_aesni_ecb_encrypt_begin:
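+       # after the four pushes: 20(%esp)=in, 24(%esp)=out, 28(%esp)=len,
+       # 32(%esp)=key, 36(%esp)=enc flag (zero branches to .L007ecb_decrypt)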
+       pushl   %ebp
+       pushl   %ebx
+       pushl   %esi
+       pushl   %edi
+       movl    20(%esp),%esi
+       movl    24(%esp),%edi
+       movl    28(%esp),%eax
+       movl    32(%esp),%edx
+       movl    36(%esp),%ecx
+       cmpl    $16,%eax
+       jb      .L006ecb_ret
+       andl    $-16,%eax
+       testl   %ecx,%ecx
+       movl    240(%edx),%ecx
+       movl    %edx,%ebp
+       movl    %ecx,%ebx
+       jz      .L007ecb_decrypt
+       subl    $64,%eax
+       jbe     .L008ecb_enc_tail
+       jmp     .L009ecb_enc_loop3
+.align 16
+.L009ecb_enc_loop3:
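+       # main loop: three blocks per pass while more than 64 bytes remain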
+       movups  (%esi),%xmm0
+       movups  16(%esi),%xmm1
+       movups  32(%esi),%xmm2
+       call    _aesni_encrypt3
+       subl    $48,%eax
+       leal    48(%esi),%esi
+       leal    48(%edi),%edi
+       movups  %xmm0,-48(%edi)
+       movl    %ebp,%edx
+       movups  %xmm1,-32(%edi)
+       movl    %ebx,%ecx
+       movups  %xmm2,-16(%edi)
+       ja      .L009ecb_enc_loop3
+.L008ecb_enc_tail:
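+       # 16..64 bytes (one to four whole blocks) remain; dispatch on count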
+       addl    $64,%eax
+       jz      .L006ecb_ret
+       cmpl    $16,%eax
+       movups  (%esi),%xmm0
+       je      .L010ecb_enc_one
+       cmpl    $32,%eax
+       movups  16(%esi),%xmm1
+       je      .L011ecb_enc_two
+       cmpl    $48,%eax
+       movups  32(%esi),%xmm2
+       je      .L012ecb_enc_three
+       movups  48(%esi),%xmm7
+       call    _aesni_encrypt4
+       movups  %xmm0,(%edi)
+       movups  %xmm1,16(%edi)
+       movups  %xmm2,32(%edi)
+       movups  %xmm7,48(%edi)
+       jmp     .L006ecb_ret
+.align 16
+.L010ecb_enc_one:
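+       # one block left: inline copy of the aesni_encrypt round loop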
+       movups  (%edx),%xmm3
+       movups  16(%edx),%xmm4
+       leal    32(%edx),%edx
+       pxor    %xmm3,%xmm0
+.L013enc1_loop:
+       aesenc  %xmm4,%xmm0
+       decl    %ecx
+       movups  (%edx),%xmm4
+       leal    16(%edx),%edx
+       jnz     .L013enc1_loop
+       aesenclast      %xmm4,%xmm0
+       movups  %xmm0,(%edi)
+       jmp     .L006ecb_ret
+.align 16
+.L011ecb_enc_two:
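+       # two blocks left: reuse the 3-block helper; the third result
+       # (%xmm2) is simply never stored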
+       call    _aesni_encrypt3
+       movups  %xmm0,(%edi)
+       movups  %xmm1,16(%edi)
+       jmp     .L006ecb_ret
+.align 16
+.L012ecb_enc_three:
+       call    _aesni_encrypt3
+       movups  %xmm0,(%edi)
+       movups  %xmm1,16(%edi)
+       movups  %xmm2,32(%edi)
+       jmp     .L006ecb_ret
+.align 16
+.L007ecb_decrypt:
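+       # decrypt side mirrors the encrypt structure above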
+       subl    $64,%eax
+       jbe     .L014ecb_dec_tail
+       jmp     .L015ecb_dec_loop3
+.align 16
+.L015ecb_dec_loop3:
+       movups  (%esi),%xmm0
+       movups  16(%esi),%xmm1
+       movups  32(%esi),%xmm2
+       call    _aesni_decrypt3
+       subl    $48,%eax
+       leal    48(%esi),%esi
+       leal    48(%edi),%edi
+       movups  %xmm0,-48(%edi)
+       movl    %ebp,%edx
+       movups  %xmm1,-32(%edi)


