crypto: arm64/sha2-ce - clean up backwards function names
author    Eric Biggers <ebiggers@google.com>
          Tue, 10 Oct 2023 06:41:24 +0000 (23:41 -0700)
committer Herbert Xu <herbert@gondor.apana.org.au>
          Fri, 20 Oct 2023 05:39:25 +0000 (13:39 +0800)
In the Linux kernel, a function whose name has two leading underscores
is conventionally called by the same-named function without leading
underscores -- not the other way around.  __sha2_ce_transform() and
__sha256_block_data_order() got this backwards.  Fix this, albeit
without changing "sha256_block_data_order" in the perlasm since that is
OpenSSL code.  No change in behavior.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
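
As a minimal sketch of the convention the message describes (hypothetical
names, not taken from this patch): the double-underscore function is the
inner worker, and the same-named function without underscores is the
wrapper that calls it:

/*
 * Sketch of the convention (hypothetical names, not from this patch):
 * __frob() is the inner worker; frob() is the wrapper that calls it,
 * never the other way around.
 */
static int __frob(int x)
{
        return x + 1;           /* the actual work */
}

static int frob(int x)
{
        return __frob(x);       /* wrapper calls the __ version */
}

The patch renames the pair so that the assembly worker carries the leading
underscores and the C wrapper does not.
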
arch/arm64/crypto/sha2-ce-core.S
arch/arm64/crypto/sha2-ce-glue.c

diff --git a/arch/arm64/crypto/sha2-ce-core.S b/arch/arm64/crypto/sha2-ce-core.S
index 491179922f49808f1144a7a313b3eb647067d17e..fce84d88ddb2cce382ac6cc3d86dda6e6bb80e43 100644
--- a/arch/arm64/crypto/sha2-ce-core.S
+++ b/arch/arm64/crypto/sha2-ce-core.S
        .word           0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
 
        /*
-        * void sha2_ce_transform(struct sha256_ce_state *sst, u8 const *src,
-        *                        int blocks)
+        * int __sha256_ce_transform(struct sha256_ce_state *sst, u8 const *src,
+        *                           int blocks)
         */
        .text
-SYM_FUNC_START(sha2_ce_transform)
+SYM_FUNC_START(__sha256_ce_transform)
        /* load round constants */
        adr_l           x8, .Lsha2_rcon
        ld1             { v0.4s- v3.4s}, [x8], #64
@@ -154,4 +154,4 @@ CPU_LE(     rev32           v19.16b, v19.16b        )
 3:     st1             {dgav.4s, dgbv.4s}, [x0]
        mov             w0, w2
        ret
-SYM_FUNC_END(sha2_ce_transform)
+SYM_FUNC_END(__sha256_ce_transform)
diff --git a/arch/arm64/crypto/sha2-ce-glue.c b/arch/arm64/crypto/sha2-ce-glue.c
index f2f118b0e1c1f4f83bcbe5fae5a51d25577e57ca..0a44d2e7ee1f7b1d5da894b6229a75c3a3c7bde4 100644
--- a/arch/arm64/crypto/sha2-ce-glue.c
+++ b/arch/arm64/crypto/sha2-ce-glue.c
@@ -30,18 +30,19 @@ struct sha256_ce_state {
 extern const u32 sha256_ce_offsetof_count;
 extern const u32 sha256_ce_offsetof_finalize;
 
-asmlinkage int sha2_ce_transform(struct sha256_ce_state *sst, u8 const *src,
-                                int blocks);
+asmlinkage int __sha256_ce_transform(struct sha256_ce_state *sst, u8 const *src,
+                                    int blocks);
 
-static void __sha2_ce_transform(struct sha256_state *sst, u8 const *src,
+static void sha256_ce_transform(struct sha256_state *sst, u8 const *src,
                                int blocks)
 {
        while (blocks) {
                int rem;
 
                kernel_neon_begin();
-               rem = sha2_ce_transform(container_of(sst, struct sha256_ce_state,
-                                                    sst), src, blocks);
+               rem = __sha256_ce_transform(container_of(sst,
+                                                        struct sha256_ce_state,
+                                                        sst), src, blocks);
                kernel_neon_end();
                src += (blocks - rem) * SHA256_BLOCK_SIZE;
                blocks = rem;
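
The loop in this hunk depends on a contract worth spelling out: the
assembly routine may consume only a prefix of the input and returns the
number of blocks still unprocessed, so the wrapper can drop the NEON
context (re-enabling preemption) between chunks.  A sketch of that
contract, where should_yield() and consume_block() are hypothetical
stand-ins for what the real assembly does:

/*
 * Contract sketch only -- should_yield() and consume_block() are
 * hypothetical stand-ins, not real kernel APIs.  The callee hashes
 * as many blocks as it can and reports the remainder; the caller's
 * loop (as in the hunk above) retries after briefly releasing the
 * NEON unit.
 */
static int example_transform(struct sha256_state *sst, u8 const *src,
                             int blocks)
{
        while (blocks && !should_yield()) {
                consume_block(sst, src);        /* hash one 64-byte block */
                src += SHA256_BLOCK_SIZE;
                blocks--;
        }
        return blocks;  /* 0 when done; else blocks left for the caller */
}
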
@@ -55,8 +56,8 @@ const u32 sha256_ce_offsetof_finalize = offsetof(struct sha256_ce_state,
 
 asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);
 
-static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
-                                     int blocks)
+static void sha256_arm64_transform(struct sha256_state *sst, u8 const *src,
+                                  int blocks)
 {
        sha256_block_data_order(sst->state, src, blocks);
 }
@@ -68,10 +69,10 @@ static int sha256_ce_update(struct shash_desc *desc, const u8 *data,
 
        if (!crypto_simd_usable())
                return sha256_base_do_update(desc, data, len,
-                               __sha256_block_data_order);
+                                            sha256_arm64_transform);
 
        sctx->finalize = 0;
-       sha256_base_do_update(desc, data, len, __sha2_ce_transform);
+       sha256_base_do_update(desc, data, len, sha256_ce_transform);
 
        return 0;
 }
@@ -85,8 +86,8 @@ static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
        if (!crypto_simd_usable()) {
                if (len)
                        sha256_base_do_update(desc, data, len,
-                               __sha256_block_data_order);
-               sha256_base_do_finalize(desc, __sha256_block_data_order);
+                                             sha256_arm64_transform);
+               sha256_base_do_finalize(desc, sha256_arm64_transform);
                return sha256_base_finish(desc, out);
        }
 
@@ -96,9 +97,9 @@ static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
         */
        sctx->finalize = finalize;
 
-       sha256_base_do_update(desc, data, len, __sha2_ce_transform);
+       sha256_base_do_update(desc, data, len, sha256_ce_transform);
        if (!finalize)
-               sha256_base_do_finalize(desc, __sha2_ce_transform);
+               sha256_base_do_finalize(desc, sha256_ce_transform);
        return sha256_base_finish(desc, out);
 }
 
@@ -107,12 +108,12 @@ static int sha256_ce_final(struct shash_desc *desc, u8 *out)
        struct sha256_ce_state *sctx = shash_desc_ctx(desc);
 
        if (!crypto_simd_usable()) {
-               sha256_base_do_finalize(desc, __sha256_block_data_order);
+               sha256_base_do_finalize(desc, sha256_arm64_transform);
                return sha256_base_finish(desc, out);
        }
 
        sctx->finalize = 0;
-       sha256_base_do_finalize(desc, __sha2_ce_transform);
+       sha256_base_do_finalize(desc, sha256_ce_transform);
        return sha256_base_finish(desc, out);
 }