git.ipfire.org Git - thirdparty/zlib-ng.git/commitdiff
Remove unused chunk memory functions from functable.
author Nathan Moinvaziri <nathan@nathanm.com>
Sun, 28 Aug 2022 02:34:30 +0000 (19:34 -0700)
committer Hans Kristian Rosbach <hk-github@circlestorm.org>
Sun, 5 Feb 2023 16:51:46 +0000 (17:51 +0100)
arch/power/chunkset_power8.c
arch/x86/chunkset_sse41.c
cpu_features.h
functable.c
functable.h

index abc5f5e2148a43c1843a566dd4a587a71bb74e2f..389be0817168a90b73369a7d1e7152626fe7a259 100644 (file)
@@ -32,12 +32,6 @@ static inline void chunkmemset_8(uint8_t *from, chunk_t *chunk) {
     *chunk = (vector unsigned char)vec_splats(tmp);
 }
 
-#define CHUNKSIZE        chunksize_power8
-#define CHUNKCOPY        chunkcopy_power8
-#define CHUNKUNROLL      chunkunroll_power8
-#define CHUNKMEMSET      chunkmemset_power8
-#define CHUNKMEMSET_SAFE chunkmemset_safe_power8
-
 static inline void loadchunk(uint8_t const *s, chunk_t *chunk) {
     *chunk = vec_xl(0, s);
 }
@@ -46,6 +40,12 @@ static inline void storechunk(uint8_t *out, chunk_t *chunk) {
     vec_xst(*chunk, 0, out);
 }
 
+#define CHUNKSIZE        chunksize_power8
+#define CHUNKCOPY        chunkcopy_power8
+#define CHUNKUNROLL      chunkunroll_power8
+#define CHUNKMEMSET      chunkmemset_power8
+#define CHUNKMEMSET_SAFE chunkmemset_safe_power8
+
 #include "chunkset_tpl.h"
 
 #define INFLATE_FAST     inflate_fast_power8
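
Editorial note: the chunkset_power8.c hunks only move the CHUNK* name macros so they sit directly in front of #include "chunkset_tpl.h", after loadchunk() and storechunk() are defined; the template header then stamps out the chunk helpers under whatever names those macros expand to. The following self-contained sketch only illustrates that macro-named-template pattern; every name in it (the scalar chunk_t stand-in, chunksize_demo, chunkcopy_demo, the simplified copy loop) is hypothetical and not taken from zlib-ng's actual chunkset_tpl.h.

    /* sketch.c -- illustration of the macro-named template pattern (hypothetical names) */
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    typedef uint64_t chunk_t;                      /* stand-in for the per-arch vector type */

    static inline void loadchunk(const uint8_t *s, chunk_t *chunk) {
        memcpy(chunk, s, sizeof(chunk_t));
    }

    static inline void storechunk(uint8_t *out, chunk_t *chunk) {
        memcpy(out, chunk, sizeof(chunk_t));
    }

    /* The arch-specific file picks the exported names, then includes the template. */
    #define CHUNKSIZE chunksize_demo
    #define CHUNKCOPY chunkcopy_demo

    /* ---- roughly what the shared template expands to under those names ---- */
    uint32_t CHUNKSIZE(void) {
        return (uint32_t)sizeof(chunk_t);
    }

    uint8_t *CHUNKCOPY(uint8_t *out, const uint8_t *from, unsigned len) {
        chunk_t chunk;
        while (len > 0) {
            unsigned step = len < sizeof(chunk_t) ? (unsigned)len : (unsigned)sizeof(chunk_t);
            loadchunk(from, &chunk);
            storechunk(out, &chunk);               /* may write past len; caller provides slack */
            out += step;
            from += step;
            len -= step;
        }
        return out;
    }
    /* ---- end of template expansion ---- */

    int main(void) {
        uint8_t src[32] = "macro-template demo";
        uint8_t dst[40] = {0};
        chunkcopy_demo(dst, src, 20);              /* the generated function, by its expanded name */
        printf("%u-byte chunks: %s\n", chunksize_demo(), dst);
        return 0;
    }

Compiling and running the sketch prints the chunk width and the copied string; in zlib-ng the same mechanism produces chunksize_power8(), chunkcopy_power8(), and the rest from the macros shown in the diff above.
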
index 9a9558856c7a8470b8ca1f172abcc767a4591e10..4b7396bcacec773ab9ca87fb78f200a391c77571 100644 (file)
@@ -86,12 +86,13 @@ static inline chunk_t GET_CHUNK_MAG(uint8_t *buf, uint32_t *chunk_rem, uint32_t
 }
 
 extern uint8_t* chunkcopy_sse2(uint8_t *out, uint8_t const *from, unsigned len);
+extern uint8_t* chunkunroll_sse2(uint8_t *out, unsigned *dist, unsigned *len);
 
-#define CHUNKSIZE            chunksize_sse41
-#define CHUNKMEMSET          chunkmemset_sse41
-#define CHUNKMEMSET_SAFE     chunkmemset_safe_sse41
-#define CHUNKCOPY(a, b, c)   chunkcopy_sse2(a, b, c)
-#define CHUNKUNROLL(a, b, c) chunkunroll_sse2(a, b, c)
+#define CHUNKSIZE        chunksize_sse41
+#define CHUNKMEMSET      chunkmemset_sse41
+#define CHUNKMEMSET_SAFE chunkmemset_safe_sse41
+#define CHUNKCOPY        chunkcopy_sse2
+#define CHUNKUNROLL      chunkunroll_sse2
 
 #include "chunkset_tpl.h"
 
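
Editorial note: this hunk adds the previously missing extern declaration for chunkunroll_sse2 and turns the function-like macros CHUNKCOPY(a, b, c) / CHUNKUNROLL(a, b, c) into plain aliases for the SSE2 helpers, matching how the other CHUNK* macros are written. The snippet below is only a generic illustration of what a plain (object-like) alias allows, with a dummy body standing in for the real chunkcopy_sse2; it is not code from chunkset_tpl.h.

    #include <stdint.h>
    #include <string.h>

    /* Dummy body standing in for zlib-ng's real SSE2 implementation. */
    uint8_t *chunkcopy_sse2(uint8_t *out, uint8_t const *from, unsigned len) {
        memcpy(out, from, len);
        return out + len;
    }

    /* Object-like alias: every occurrence of CHUNKCOPY expands to chunkcopy_sse2. */
    #define CHUNKCOPY chunkcopy_sse2

    /* The alias works at a call site ... */
    uint8_t *copy20(uint8_t *out, uint8_t const *from) {
        return CHUNKCOPY(out, from, 20);
    }

    /* ... and wherever the bare identifier is needed, e.g. when taking the address,
     * which a function-like macro such as CHUNKCOPY(a, b, c) could not provide. */
    uint8_t *(*const copy_fn)(uint8_t *, uint8_t const *, unsigned) = CHUNKCOPY;
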
index d211cb112e2cca31b06e76e14801f5a7bacb3971..696b152210e8b4c193209b9d3c2dae638f7647c1 100644 (file)
@@ -73,40 +73,24 @@ extern uint32_t crc32_pclmulqdq(uint32_t crc32, const uint8_t *buf, size_t len);
 
 /* memory chunking */
 extern uint32_t chunksize_c(void);
-extern uint8_t* chunkcopy_c(uint8_t *out, uint8_t const *from, unsigned len);
-extern uint8_t* chunkunroll_c(uint8_t *out, unsigned *dist, unsigned *len);
-extern uint8_t* chunkmemset_c(uint8_t *out, unsigned dist, unsigned len);
 extern uint8_t* chunkmemset_safe_c(uint8_t *out, unsigned dist, unsigned len, unsigned left);
 #ifdef X86_SSE2_CHUNKSET
 extern uint32_t chunksize_sse2(void);
-extern uint8_t* chunkcopy_sse2(uint8_t *out, uint8_t const *from, unsigned len);
-extern uint8_t* chunkunroll_sse2(uint8_t *out, unsigned *dist, unsigned *len);
-extern uint8_t* chunkmemset_sse2(uint8_t *out, unsigned dist, unsigned len);
 extern uint8_t* chunkmemset_safe_sse2(uint8_t *out, unsigned dist, unsigned len, unsigned left);
 #endif
 #ifdef X86_SSE41
-extern uint8_t* chunkmemset_sse41(uint8_t *out, unsigned dist, unsigned len);
 extern uint8_t* chunkmemset_safe_sse41(uint8_t *out, unsigned dist, unsigned len, unsigned left);
 #endif
 #ifdef X86_AVX_CHUNKSET
 extern uint32_t chunksize_avx(void);
-extern uint8_t* chunkcopy_avx(uint8_t *out, uint8_t const *from, unsigned len);
-extern uint8_t* chunkunroll_avx(uint8_t *out, unsigned *dist, unsigned *len);
-extern uint8_t* chunkmemset_avx(uint8_t *out, unsigned dist, unsigned len);
 extern uint8_t* chunkmemset_safe_avx(uint8_t *out, unsigned dist, unsigned len, unsigned left);
 #endif
 #ifdef ARM_NEON_CHUNKSET
 extern uint32_t chunksize_neon(void);
-extern uint8_t* chunkcopy_neon(uint8_t *out, uint8_t const *from, unsigned len);
-extern uint8_t* chunkunroll_neon(uint8_t *out, unsigned *dist, unsigned *len);
-extern uint8_t* chunkmemset_neon(uint8_t *out, unsigned dist, unsigned len);
 extern uint8_t* chunkmemset_safe_neon(uint8_t *out, unsigned dist, unsigned len, unsigned left);
 #endif
 #ifdef POWER8_VSX_CHUNKSET
 extern uint32_t chunksize_power8(void);
-extern uint8_t* chunkcopy_power8(uint8_t *out, uint8_t const *from, unsigned len);
-extern uint8_t* chunkunroll_power8(uint8_t *out, unsigned *dist, unsigned *len);
-extern uint8_t* chunkmemset_power8(uint8_t *out, unsigned dist, unsigned len);
 extern uint8_t* chunkmemset_safe_power8(uint8_t *out, unsigned dist, unsigned len, unsigned left);
 #endif
 
index 1bdba15390dea9b2a3c30f7b97db41de8cf3bdf3..7f3bc6e034c0e247072a304f730b890d5078b33a 100644 (file)
@@ -231,96 +231,54 @@ static void init_functable(void) {
         ft.chunksize = &chunksize_power8;
 #endif
 
-    // chunkcopy_stub:
-    ft.chunkcopy = &chunkcopy_c;
-#ifdef X86_SSE2_CHUNKSET
-# if !defined(__x86_64__) && !defined(_M_X64) && !defined(X86_NOCHECK_SSE2)
-    if (x86_cpu_has_sse2)
-# endif
-        ft.chunkcopy = &chunkcopy_sse2;
-#endif
-#ifdef X86_AVX_CHUNKSET
-    if (x86_cpu_has_avx2)
-        ft.chunkcopy = &chunkcopy_avx;
-#endif
-#ifdef ARM_NEON_CHUNKSET
-    if (arm_cpu_has_neon)
-        ft.chunkcopy = &chunkcopy_neon;
-#endif
-#ifdef POWER8_VSX_CHUNKSET
-    if (power_cpu_has_arch_2_07)
-        ft.chunkcopy = &chunkcopy_power8;
-#endif
-
-    // chunkunroll_stub:
-    ft.chunkunroll = &chunkunroll_c;
-#ifdef X86_SSE2_CHUNKSET
-# if !defined(__x86_64__) && !defined(_M_X64) && !defined(X86_NOCHECK_SSE2)
-    if (x86_cpu_has_sse2)
-# endif
-        ft.chunkunroll = &chunkunroll_sse2;
-#endif
-#ifdef X86_AVX_CHUNKSET
-    if (x86_cpu_has_avx2)
-        ft.chunkunroll = &chunkunroll_avx;
-#endif
-#ifdef ARM_NEON_CHUNKSET
-    if (arm_cpu_has_neon)
-        ft.chunkunroll = &chunkunroll_neon;
-#endif
-#ifdef POWER8_VSX_CHUNKSET
-    if (power_cpu_has_arch_2_07)
-        ft.chunkunroll = &chunkunroll_power8;
-#endif
-
-    // chunkmemset_stub:
-    ft.chunkmemset = &chunkmemset_c;
+    // chunkmemset_safe_stub:
+    ft.chunkmemset_safe = &chunkmemset_safe_c;
 #ifdef X86_SSE2_CHUNKSET
 # if !defined(__x86_64__) && !defined(_M_X64) && !defined(X86_NOCHECK_SSE2)
     if (x86_cpu_has_sse2)
 # endif
-        ft.chunkmemset = &chunkmemset_sse2;
+        ft.chunkmemset_safe = &chunkmemset_safe_sse2;
 #endif
 #if defined(X86_SSE41) && defined(X86_SSE2)
     if (x86_cpu_has_sse41)
-        ft.chunkmemset = &chunkmemset_sse41;
+        ft.chunkmemset_safe = &chunkmemset_safe_sse41;
 #endif
 #ifdef X86_AVX_CHUNKSET
     if (x86_cpu_has_avx2)
-        ft.chunkmemset = &chunkmemset_avx;
+        ft.chunkmemset_safe = &chunkmemset_safe_avx;
 #endif
 #ifdef ARM_NEON_CHUNKSET
     if (arm_cpu_has_neon)
-        ft.chunkmemset = &chunkmemset_neon;
+        ft.chunkmemset_safe = &chunkmemset_safe_neon;
 #endif
 #ifdef POWER8_VSX_CHUNKSET
     if (power_cpu_has_arch_2_07)
-        ft.chunkmemset = &chunkmemset_power8;
+        ft.chunkmemset_safe = &chunkmemset_safe_power8;
 #endif
 
-    // chunkmemset_safe_stub:
-    ft.chunkmemset_safe = &chunkmemset_safe_c;
+    // inflate_fast_stub:
+    ft.inflate_fast = &inflate_fast_c;
 #ifdef X86_SSE2_CHUNKSET
 # if !defined(__x86_64__) && !defined(_M_X64) && !defined(X86_NOCHECK_SSE2)
     if (x86_cpu_has_sse2)
 # endif
-        ft.chunkmemset_safe = &chunkmemset_safe_sse2;
+        ft.inflate_fast = &inflate_fast_sse2;
 #endif
 #if defined(X86_SSE41) && defined(X86_SSE2)
     if (x86_cpu_has_sse41)
-        ft.chunkmemset_safe = &chunkmemset_safe_sse41;
+        ft.inflate_fast = &inflate_fast_sse41;
 #endif
 #ifdef X86_AVX_CHUNKSET
     if (x86_cpu_has_avx2)
-        ft.chunkmemset_safe = &chunkmemset_safe_avx;
+        ft.inflate_fast = &inflate_fast_avx;
 #endif
 #ifdef ARM_NEON_CHUNKSET
     if (arm_cpu_has_neon)
-        ft.chunkmemset_safe = &chunkmemset_safe_neon;
+        ft.inflate_fast = &inflate_fast_neon;
 #endif
 #ifdef POWER8_VSX_CHUNKSET
     if (power_cpu_has_arch_2_07)
-        ft.chunkmemset_safe = &chunkmemset_safe_power8;
+        ft.inflate_fast = &inflate_fast_power8;
 #endif
 
     // crc32_stub:
@@ -374,10 +332,8 @@ static void init_functable(void) {
     functable.crc32_fold_final = ft.crc32_fold_final;
     functable.compare256 = ft.compare256;
     functable.chunksize = ft.chunksize;
-    functable.chunkcopy = ft.chunkcopy;
-    functable.chunkunroll = ft.chunkunroll;
-    functable.chunkmemset = ft.chunkmemset;
     functable.chunkmemset_safe = ft.chunkmemset_safe;
+    functable.inflate_fast = ft.inflate_fast;
     functable.insert_string = ft.insert_string;
     functable.longest_match = ft.longest_match;
     functable.longest_match_slow = ft.longest_match_slow;
@@ -452,24 +408,14 @@ static uint32_t chunksize_stub(void) {
     return functable.chunksize();
 }
 
-static uint8_t* chunkcopy_stub(uint8_t* out, uint8_t const* from, unsigned len) {
-    init_functable();
-    return functable.chunkcopy(out, from, len);
-}
-
-static uint8_t* chunkunroll_stub(uint8_t* out, unsigned* dist, unsigned* len) {
-    init_functable();
-    return functable.chunkunroll(out, dist, len);
-}
-
-static uint8_t* chunkmemset_stub(uint8_t* out, unsigned dist, unsigned len) {
+static uint8_t* chunkmemset_safe_stub(uint8_t* out, unsigned dist, unsigned len, unsigned left) {
     init_functable();
-    return functable.chunkmemset(out, dist, len);
+    return functable.chunkmemset_safe(out, dist, len, left);
 }
 
-static uint8_t* chunkmemset_safe_stub(uint8_t* out, unsigned dist, unsigned len, unsigned left) {
+static void inflate_fast_stub(void *strm, uint32_t start) {
     init_functable();
-    return functable.chunkmemset_safe(out, dist, len, left);
+    functable.inflate_fast(strm, start);
 }
 
 static uint32_t crc32_stub(uint32_t crc, const uint8_t* buf, size_t len) {
@@ -482,35 +428,6 @@ static uint32_t compare256_stub(const uint8_t* src0, const uint8_t* src1) {
     return functable.compare256(src0, src1);
 }
 
-Z_INTERNAL void inflate_fast_stub(void *strm, uint32_t start) {
-    functable.inflate_fast = &inflate_fast_c;
-
-#ifdef X86_SSE2_CHUNKSET
-# if !defined(__x86_64__) && !defined(_M_X64) && !defined(X86_NOCHECK_SSE2)
-    if (x86_cpu_has_sse2)
-# endif
-        functable.inflate_fast = &inflate_fast_sse2;
-#endif
-#if defined(X86_SSE41) && defined(X86_SSE2)
-    if (x86_cpu_has_sse41)
-        functable.inflate_fast = &inflate_fast_sse41;
-#endif
-#ifdef X86_AVX_CHUNKSET
-    if (x86_cpu_has_avx2)
-        functable.inflate_fast = &inflate_fast_avx;
-#endif
-#ifdef ARM_NEON_CHUNKSET
-    if (arm_cpu_has_neon)
-        functable.inflate_fast = &inflate_fast_neon;
-#endif
-#ifdef POWER8_VSX_CHUNKSET
-    if (power_cpu_has_arch_2_07)
-        functable.inflate_fast = &inflate_fast_power8;
-#endif
-
-    functable.inflate_fast(strm, start);
-}
-
 /* functable init */
 Z_INTERNAL Z_TLS struct functable_s functable = {
     adler32_stub,
@@ -522,9 +439,6 @@ Z_INTERNAL Z_TLS struct functable_s functable = {
     crc32_fold_final_stub,
     compare256_stub,
     chunksize_stub,
-    chunkcopy_stub,
-    chunkunroll_stub,
-    chunkmemset_stub,
     chunkmemset_safe_stub,
     inflate_fast_stub,
     insert_string_stub,
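
Editorial note: after this change, init_functable() is the single place that resolves every entry, including inflate_fast, and the removed chunkcopy/chunkunroll/chunkmemset stubs are gone. The dispatch pattern visible in the hunks above is: the table starts out populated with *_stub functions, the first call to any stub runs init_functable(), which overwrites the table with the best implementations for the detected CPU, and later calls go straight through the updated pointers. Below is a stripped-down, self-contained sketch of that lazy-initialization pattern; the feature test, the generic/fast pair, and the single-entry table are hypothetical simplifications of the real functable.

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    struct functable_s {
        void (*inflate_fast)(void *strm, uint32_t start);
    };

    static void inflate_fast_stub(void *strm, uint32_t start);

    /* The table starts out pointing at the stub. */
    static struct functable_s functable = { inflate_fast_stub };

    static void inflate_fast_c(void *strm, uint32_t start) {
        (void)strm;
        printf("generic inflate_fast, start=%u\n", start);
    }

    static void inflate_fast_fast(void *strm, uint32_t start) {
        (void)strm;
        printf("cpu-specific inflate_fast, start=%u\n", start);
    }

    static int cpu_has_fast_path(void) {           /* stand-in for x86_cpu_has_sse2 etc. */
        return 1;
    }

    static void init_functable(void) {
        struct functable_s ft;
        ft.inflate_fast = &inflate_fast_c;         /* portable default */
        if (cpu_has_fast_path())
            ft.inflate_fast = &inflate_fast_fast;  /* upgrade when the CPU allows it */
        functable = ft;                            /* publish the resolved entries */
    }

    static void inflate_fast_stub(void *strm, uint32_t start) {
        init_functable();                          /* first call resolves the whole table */
        functable.inflate_fast(strm, start);       /* re-dispatch through the new pointer */
    }

    int main(void) {
        void *strm = NULL;
        functable.inflate_fast(strm, 0);           /* goes through the stub once */
        functable.inflate_fast(strm, 1);           /* now calls the resolved function directly */
        return 0;
    }

The same stub-then-resolve flow replaces the old standalone inflate_fast_stub removed above, which re-ran the CPU feature checks itself instead of going through init_functable().
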
index 4319b4c11b74fe9ff749cc0b4b755680c1c25cc0..51acf457890a1e2366d8a507b4bf8f0838c3e785 100644 (file)
@@ -20,9 +20,6 @@ struct functable_s {
     uint32_t (* crc32_fold_final)   (struct crc32_fold_s *crc);
     uint32_t (* compare256)         (const uint8_t *src0, const uint8_t *src1);
     uint32_t (* chunksize)          (void);
-    uint8_t* (* chunkcopy)          (uint8_t *out, uint8_t const *from, unsigned len);
-    uint8_t* (* chunkunroll)        (uint8_t *out, unsigned *dist, unsigned *len);
-    uint8_t* (* chunkmemset)        (uint8_t *out, unsigned dist, unsigned len);
     uint8_t* (* chunkmemset_safe)   (uint8_t *out, unsigned dist, unsigned len, unsigned left);
     void     (* inflate_fast)       (void *strm, uint32_t start);
     void     (* insert_string)      (deflate_state *const s, uint32_t str, uint32_t count);