git.ipfire.org Git - thirdparty/nettle.git/commitdiff
x86_64: Update table-based ghash to new organization.
author Niels Möller <nisse@lysator.liu.se>
Sat, 19 Feb 2022 19:00:26 +0000 (20:00 +0100)
committer Niels Möller <nisse@lysator.liu.se>
Sat, 19 Feb 2022 19:00:26 +0000 (20:00 +0100)
ChangeLog
configure.ac
x86_64/ghash-update.asm [moved from x86_64/gcm-hash8.asm with 86% similarity]

index dc288dd10262fb5e300e5429577e7044f53bd01e..31ee8117d477207a03a02c61e6e3635c8b4127dc 100644 (file)
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,11 @@
 2022-02-19  Niels Möller  <nisse@lysator.liu.se>
 
+       * configure.ac (asm_replace_list): Add ghash-set-key.asm ghash-update.asm.
+       (asm_nettle_optional_list): Delete gcm-hash.asm gcm-hash8.asm.
+       * x86_64/ghash-update.asm: New file, based on old gcm-hash8.asm,
+       but without any handling of partial blocks.
+       * x86_64/gcm-hash8.asm: Deleted.
+
        * ghash-set-key.c (_ghash_digest): Moved function from...
        * ghash-update.c (_ghash_digest): ...old location.
 
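Since the assembly entry point now consumes only whole 16-byte blocks, any trailing partial block has to be handled by the C layer. The following is a minimal C sketch of such a caller, assuming only the _ghash_update prototype shown in the renamed file below; hash_bytes is a hypothetical wrapper name, not a function from the nettle sources, and zero-padding the tail relies on GHASH treating a short final block as if padded with zeros.

/* Hypothetical caller sketch: let the assembly consume all full blocks,
   then fold any zero-padded tail block in C. */

#include <stdint.h>
#include <string.h>

#define GHASH_BLOCK_SIZE 16

struct gcm_key;
union gcm_block;

/* Prototype as given in the comment of x86_64/ghash-update.asm. */
const uint8_t *
_ghash_update (const struct gcm_key *key, union gcm_block *x,
               size_t blocks, const uint8_t *data);

static void
hash_bytes (const struct gcm_key *key, union gcm_block *x,
            size_t length, const uint8_t *data)
{
  /* The assembly loop processes length/16 full blocks and returns a
     pointer just past the last byte it consumed. */
  data = _ghash_update (key, x, length / GHASH_BLOCK_SIZE, data);
  length %= GHASH_BLOCK_SIZE;

  if (length > 0)
    {
      /* Zero-pad the remaining bytes to a full block and hash it. */
      uint8_t block[GHASH_BLOCK_SIZE] = {0};
      memcpy (block, data, length);
      _ghash_update (key, x, 1, block);
    }
}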
index dff7d869e3bed0c29080f7f1b3d46363c2c1fdd9..529e6cb5c15b964b1ea871d4c612a84b19740a23 100644 (file)
--- a/configure.ac
+++ b/configure.ac
@@ -586,6 +586,7 @@ asm_replace_list="aes-encrypt-internal.asm aes-decrypt-internal.asm \
                cbc-aes256-encrypt.asm \
                arcfour-crypt.asm camellia-crypt-internal.asm \
                md5-compress.asm memxor.asm memxor3.asm \
+               ghash-set-key.asm ghash-update.asm \
                poly1305-internal.asm \
                chacha-core-internal.asm \
                salsa20-crypt.asm salsa20-core-internal.asm \
@@ -594,7 +595,7 @@ asm_replace_list="aes-encrypt-internal.asm aes-decrypt-internal.asm \
                sha3-permute.asm umac-nh.asm umac-nh-n.asm machine.m4"
 
 # Assembler files which generate additional object files if they are used.
-asm_nettle_optional_list="gcm-hash.asm gcm-hash8.asm cpuid.asm cpu-facility.asm \
+asm_nettle_optional_list="cpuid.asm cpu-facility.asm \
   aes-encrypt-internal-2.asm aes-decrypt-internal-2.asm memxor-2.asm memxor3-2.asm \
   aes128-set-encrypt-key-2.asm aes128-set-decrypt-key-2.asm \
   aes128-encrypt-2.asm aes128-decrypt-2.asm \
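The two lists differ in how the assembly is wired in on the C side: a file in asm_nettle_optional_list adds an extra object that the portable C code selects at compile time through a HAVE_NATIVE_<name> macro, while a file in asm_replace_list simply takes the place of the corresponding C file at link time. A rough illustration follows; the macro and symbol names are assumptions based on that convention, not quotes from the nettle sources.

/* Illustrative only -- not quoted from the nettle sources. */

#include <stddef.h>
#include <stdint.h>

struct gcm_key;
union gcm_block;

/* asm_nettle_optional_list (old scheme): the portable C code is always
   compiled and dispatches to the optional assembly object only when
   configure defined the corresponding HAVE_NATIVE_<name> macro. */
#if HAVE_NATIVE_gcm_hash8
void
_nettle_gcm_hash8 (const struct gcm_key *key, union gcm_block *x,
                   size_t length, const uint8_t *data);
# define gcm_hash _nettle_gcm_hash8
#endif

/* asm_replace_list (new scheme): ghash-update.asm provides the very
   symbol _nettle_ghash_update that ghash-update.c would otherwise
   define, so configure builds the .asm object instead of the .c file
   and no preprocessor dispatch is needed. */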
similarity index 86%
rename from x86_64/gcm-hash8.asm
rename to x86_64/ghash-update.asm
index 6c79803cb119d6e2e2060300a3c485e2a049a467..f8055bbfda24f8980bd82daf010c36f9457b3616 100644 (file)
--- a/x86_64/gcm-hash8.asm
+++ b/x86_64/ghash-update.asm
@@ -1,7 +1,7 @@
-C x86_64/gcm-hash8.asm
+C x86_64/ghash-update.asm
 
 ifelse(`
-   Copyright (C) 2013 Niels Möller
+   Copyright (C) 2013, 2022 Niels Möller
 
    This file is part of GNU Nettle.
 
@@ -34,7 +34,7 @@ C Register usage:
 
 define(`KEY', `%rdi')
 define(`XP', `%rsi')
-define(`LENGTH', `%rdx')
+define(`BLOCKS', `%rdx')
 define(`SRC', `%rcx')
 define(`X0', `%rax')
 define(`X1', `%rbx')
@@ -46,24 +46,25 @@ define(`Z0', `%r11')
 define(`Z1', `%r12')
 define(`SHIFT_TABLE', `%r13')
 
-       .file "gcm-hash8.asm"
+       .file "ghash-update.asm"
 
-       C void gcm_hash (const struct gcm_key *key, union gcm_block *x,
-       C                size_t length, const uint8_t *data)
+       C const uint8_t *_ghash_update (const struct gcm_key *key,
+       C                               union gcm_block *x,
+       C                               size_t blocks, const uint8_t *data)
 
        .text
        ALIGN(16)
-PROLOGUE(_nettle_gcm_hash8)
+PROLOGUE(_nettle_ghash_update)
        W64_ENTRY(4, 0)
        push    %rbx
        push    %rbp
        push    %r12
        push    %r13
-       sub     $16, LENGTH
+       sub     $1, BLOCKS
        lea     .Lshift_table(%rip), SHIFT_TABLE
        mov     (XP), X0
        mov     8(XP), X1
-       jc      .Lfinal
+       jc      .Ldone
 ALIGN(16)
 .Lblock_loop:
 
@@ -148,57 +149,20 @@ ALIGN(16)
        xor     Z1, X1
 
        add     $16, SRC
-       sub     $16, LENGTH
+       sub     $1, BLOCKS
        jnc     .Lblock_loop
 
-.Lfinal:
-       add     $16, LENGTH
-       jnz     .Lpartial
-
+.Ldone:
        mov     X0, (XP)
        mov     X1, 8(XP)
-
+       mov     SRC, %rax
        pop     %r13
        pop     %r12
        pop     %rbp
        pop     %rbx
        W64_EXIT(4, 0)
        ret
-
-.Lpartial:
-       C Read and xor partial block, then jump back into the loop
-       C with LENGTH == 0.
-
-       cmp     $8, LENGTH
-       jc      .Llt8
-
-       C       8 <= LENGTH < 16
-       xor     (SRC), X0
-       add     $8, SRC
-       sub     $8, LENGTH
-       jz      .Lblock_mul
-       call    .Lread_bytes
-       xor     T0, X1
-       jmp     .Lblock_mul
-
-.Llt8: C 0 < LENGTH < 8
-       call    .Lread_bytes
-       xor     T0, X0
-       jmp     .Lblock_mul
-
-C Read 0 < LENGTH < 8 bytes at SRC, result in T0
-.Lread_bytes:
-       xor     T0, T0
-       sub     $1, SRC
-ALIGN(16)
-.Lread_loop:
-       shl     $8, T0
-       orb     (SRC, LENGTH), LREG(T0)
-.Lread_next:
-       sub     $1, LENGTH
-       jnz     .Lread_loop
-       ret
-EPILOGUE(_nettle_gcm_hash8)
+EPILOGUE(_nettle_ghash_update)
 
 define(`W', `0x$2$1')
        RODATA
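For reference, each pass through the block loop above computes X := (X XOR M) * H in GF(2^128), where H is the hash subkey from which the 8-bit tables in struct gcm_key are derived. Below is a bit-at-a-time C sketch of that multiplication, following the GCM specification's reflected bit order and the reduction polynomial x^128 + x^7 + x^2 + x + 1; it deliberately ignores the table-driven optimization used by the assembly, and gf128_mul/ghash_block are illustrative names only.

#include <stdint.h>
#include <string.h>

/* Multiply x by y in GF(2^128) with GCM's bit-reflected convention,
   reducing by x^128 + x^7 + x^2 + x + 1 (0xE1 in the leading byte).
   Bit-at-a-time, as in the GCM specification. */
static void
gf128_mul (uint8_t r[16], const uint8_t x[16], const uint8_t y[16])
{
  uint8_t z[16] = {0};
  uint8_t v[16];
  memcpy (v, y, 16);

  for (int i = 0; i < 16; i++)
    for (int bit = 7; bit >= 0; bit--)
      {
        /* Bit i of x, counted from the MSB of the first byte. */
        if ((x[i] >> bit) & 1)
          for (int k = 0; k < 16; k++)
            z[k] ^= v[k];

        /* v := v * x in the field: shift right one bit in the reflected
           order, then reduce if a bit fell off the end. */
        int carry = v[15] & 1;
        for (int k = 15; k > 0; k--)
          v[k] = (uint8_t) ((v[k] >> 1) | (v[k-1] << 7));
        v[0] >>= 1;
        if (carry)
          v[0] ^= 0xE1;
      }
  memcpy (r, z, 16);
}

/* One iteration of the block loop: X := (X XOR M) * H. */
static void
ghash_block (uint8_t x[16], const uint8_t h[16], const uint8_t m[16])
{
  uint8_t t[16];
  for (int k = 0; k < 16; k++)
    t[k] = x[k] ^ m[k];
  gf128_mul (x, t, h);
}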