lib/crc32: expose whether the lib is really optimized at runtime
author	Eric Biggers <ebiggers@google.com>
	Mon, 2 Dec 2024 01:08:28 +0000 (17:08 -0800)
committer	Eric Biggers <ebiggers@google.com>
	Mon, 2 Dec 2024 01:23:01 +0000 (17:23 -0800)
Make the CRC32 library export a function crc32_optimizations() which
returns flags that indicate which CRC32 functions are actually executing
optimized code at runtime.

This will be used to determine whether the crc32[c]-$arch shash
algorithms should be registered in the crypto API.  btrfs could also
start using these flags instead of its current hack of parsing
crypto_shash_driver_name.

Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20241202010844.144356-4-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@google.com>
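
As a rough illustration (not part of this commit), a subsystem such as
btrfs could query the new flags directly instead of string-matching the
shash driver name; the helper name below is hypothetical:

	/* Hypothetical caller sketch: check the runtime flags rather
	 * than parsing crypto_shash_driver_name.  Not from this commit. */
	#include <linux/crc32.h>

	static bool crc32c_is_hw_accelerated(void)
	{
		/* CRC32C_OPTIMIZATION is set only when __crc32c_le()
		 * actually runs architecture-optimized code on the
		 * booted CPU. */
		return crc32_optimizations() & CRC32C_OPTIMIZATION;
	}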
arch/arm64/lib/crc32-glue.c
arch/riscv/lib/crc32-riscv.c
include/linux/crc32.h

diff --git a/arch/arm64/lib/crc32-glue.c b/arch/arm64/lib/crc32-glue.c
index d7f6e1cbf0d233c589a46d92d3cf44f95ae33282..15c4c9db573ec1545350ee8f908c956fefaa669f 100644
--- a/arch/arm64/lib/crc32-glue.c
+++ b/arch/arm64/lib/crc32-glue.c
@@ -85,5 +85,15 @@ u32 __pure crc32_be_arch(u32 crc, const u8 *p, size_t len)
 }
 EXPORT_SYMBOL(crc32_be_arch);
 
+u32 crc32_optimizations(void)
+{
+       if (alternative_has_cap_likely(ARM64_HAS_CRC32))
+               return CRC32_LE_OPTIMIZATION |
+                      CRC32_BE_OPTIMIZATION |
+                      CRC32C_OPTIMIZATION;
+       return 0;
+}
+EXPORT_SYMBOL(crc32_optimizations);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("arm64-optimized CRC32 functions");
diff --git a/arch/riscv/lib/crc32-riscv.c b/arch/riscv/lib/crc32-riscv.c
index a3ff7db2a1ce228c10deaafb73853d539ba38cee..53d56ab422c722254860850f5cd2226668f50f2e 100644
--- a/arch/riscv/lib/crc32-riscv.c
+++ b/arch/riscv/lib/crc32-riscv.c
@@ -297,5 +297,15 @@ legacy:
 }
 EXPORT_SYMBOL(crc32_be_arch);
 
+u32 crc32_optimizations(void)
+{
+       if (riscv_has_extension_likely(RISCV_ISA_EXT_ZBC))
+               return CRC32_LE_OPTIMIZATION |
+                      CRC32_BE_OPTIMIZATION |
+                      CRC32C_OPTIMIZATION;
+       return 0;
+}
+EXPORT_SYMBOL(crc32_optimizations);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Accelerated CRC32 implementation with Zbc extension");
diff --git a/include/linux/crc32.h b/include/linux/crc32.h
index 58c632533b086170105513ca48982b83f9ebecc2..e9bd40056687a4efa9eabfc6e2e8526607ed2613 100644
--- a/include/linux/crc32.h
+++ b/include/linux/crc32.h
@@ -37,6 +37,21 @@ static inline u32 __pure __crc32c_le(u32 crc, const u8 *p, size_t len)
        return crc32c_le_base(crc, p, len);
 }
 
+/*
+ * crc32_optimizations() returns flags that indicate which CRC32 library
+ * functions are using architecture-specific optimizations.  Unlike
+ * IS_ENABLED(CONFIG_CRC32_ARCH) it takes into account the different CRC32
+ * variants and also whether any needed CPU features are available at runtime.
+ */
+#define CRC32_LE_OPTIMIZATION  BIT(0) /* crc32_le() is optimized */
+#define CRC32_BE_OPTIMIZATION  BIT(1) /* crc32_be() is optimized */
+#define CRC32C_OPTIMIZATION    BIT(2) /* __crc32c_le() is optimized */
+#if IS_ENABLED(CONFIG_CRC32_ARCH)
+u32 crc32_optimizations(void);
+#else
+static inline u32 crc32_optimizations(void) { return 0; }
+#endif
+
 /**
  * crc32_le_combine - Combine two crc32 check values into one. For two
  *                   sequences of bytes, seq1 and seq2 with lengths len1