option(WITH_INFLATE_STRICT "Build with strict inflate distance checking" OFF)
option(WITH_INFLATE_ALLOW_INVALID_DIST "Build with zero fill for inflate invalid distances" OFF)
+option(WITH_C_FALLBACK "Build with C fallback functions.
+Disabling this causes abort() on init if the optimized functions are missing!
+Currently known safe on x86-64 when WITH_OPTIM and WITH_SSE2 are on." ON)
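+# Example: a configuration known to be safe without the C fallbacks
+# (hypothetical build invocation):
+#   cmake -DWITH_C_FALLBACK=OFF -DWITH_OPTIM=ON -DWITH_SSE2=ON ..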
+
set(ZLIB_SYMBOL_PREFIX "" CACHE STRING "Give this prefix to all publicly exported symbols.
Useful when embedding into a larger library.
Default is no prefix (empty prefix).")
mark_as_advanced(FORCE
ZLIB_SYMBOL_PREFIX
WITH_REDUCED_MEM
+ WITH_C_FALLBACK
WITH_CRC32_CHORBA
WITH_ARMV8 WITH_NEON
WITH_ARMV6
add_definitions(-DWITHOUT_CHORBA)
endif()
+if(NOT WITH_C_FALLBACK AND ${ARCH} MATCHES "x86_64")
+    # Enforce the constraint documented on the WITH_C_FALLBACK option above.
+    if(NOT (WITH_OPTIM AND WITH_SSE2))
+        message(FATAL_ERROR "WITH_C_FALLBACK=OFF on x86_64 requires WITH_OPTIM and WITH_SSE2")
+    endif()
+endif()
+
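+# NO_C_FALLBACK is consumed by the C sources (functable.c, benchmarks, tests)
+# to compile out the generic implementations entirely.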
+if(NOT WITH_C_FALLBACK)
+ add_definitions(-DNO_C_FALLBACK=1)
+endif()
+
if(CMAKE_C_COMPILER_ID MATCHES "^Intel")
if(CMAKE_HOST_UNIX)
set(WARNFLAGS -Wall)
zutil.h
zutil_p.h
)
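+# Generic C implementations, compiled only when WITH_C_FALLBACK is enabled
+# (appended to ZLIB_ALL_SRCS below).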
-set(ZLIB_SRCS
+set(ZLIB_C_FALLBACK_SRCS
arch/generic/adler32_c.c
arch/generic/adler32_fold_c.c
arch/generic/chunkset_c.c
arch/generic/compare256_c.c
- arch/generic/crc32_braid_c.c
arch/generic/crc32_c.c
+ arch/generic/crc32_braid_c.c
arch/generic/crc32_fold_c.c
arch/generic/slide_hash_c.c
+)
+
+set(ZLIB_SRCS
adler32.c
compress.c
crc32.c
if(WITH_CRC32_CHORBA)
list(APPEND ZLIB_SRCS arch/generic/crc32_chorba_c.c)
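+    # The generic crc32 with Chorba enabled still dispatches short inputs to
+    # the braid code, so crc32_braid_c.c is needed even without C fallbacks.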
+ if(NOT WITH_C_FALLBACK)
+ list(APPEND ZLIB_SRCS arch/generic/crc32_braid_c.c)
+ endif()
endif()
if(WITH_RUNTIME_CPU_DETECTION)
list(APPEND ZLIB_ALL_SRCS ${ZLIB_GZFILE_PRIVATE_HDRS} ${ZLIB_GZFILE_SRCS})
endif()
+if(WITH_C_FALLBACK)
+ list(APPEND ZLIB_ALL_SRCS ${ZLIB_C_FALLBACK_SRCS})
+endif()
+
if(NOT DEFINED BUILD_SHARED_LIBS OR BUILD_SHARED_LIBS)
set(ZLIB_DLL_SRCS win32/zlib${SUFFIX}1.rc)
endif()
add_feature_info(WITH_FUZZERS WITH_FUZZERS "Build test/fuzz")
add_feature_info(WITH_BENCHMARKS WITH_BENCHMARKS "Build test/benchmarks")
add_feature_info(WITH_BENCHMARK_APPS WITH_BENCHMARK_APPS "Build application benchmarks")
+add_feature_info(WITH_C_FALLBACK WITH_C_FALLBACK "Build with C fallbacks")
add_feature_info(WITH_OPTIM WITH_OPTIM "Build with optimisation")
add_feature_info(WITH_NEW_STRATEGIES WITH_NEW_STRATEGIES "Use new strategies")
add_feature_info(WITH_CRC32_CHORBA WITH_CRC32_CHORBA "Use optimized CRC32 algorithm Chorba")
return c;
}
+#ifndef NO_C_FALLBACK
Z_INTERNAL uint32_t crc32_braid(uint32_t c, const uint8_t *buf, size_t len) {
c = (~c) & 0xffffffff;
/* Return the CRC, post-conditioned. */
return c ^ 0xffffffff;
}
+#endif
#include "functable.h"
#include "cpu_features.h"
#include "arch_functions.h"
+#include <stdio.h>  /* fprintf */
+#include <stdlib.h> /* abort */
/* Platform has pointer size atomic store */
#if defined(__GNUC__) || defined(__clang__)
# define FUNCTABLE_BARRIER() do { /* Empty */ } while (0)
#endif
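+// Design note: with C fallbacks compiled out, some functable entries may
+// never be populated by the arch-specific code. Verifying before assignment
+// turns that into an immediate abort at init rather than a NULL-pointer
+// call at first use.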
+// Verify each pointer is valid before assigning, abort on failure
+#define FUNCTABLE_VERIFY_ASSIGN(VAR, FUNC_NAME) \
+    do { \
+        if (!VAR.FUNC_NAME) { \
+            fprintf(stderr, "Functable entry '" #FUNC_NAME "' not set!\n"); \
+            abort(); \
+        } \
+        FUNCTABLE_ASSIGN(VAR, FUNC_NAME); \
+    } while (0)
+
static void force_init_empty(void) {
// empty
}
cpu_check_features(&cf);
- // Generic code
ft.force_init = &force_init_empty;
+
+#ifndef NO_C_FALLBACK
+ // Generic code
ft.adler32 = &adler32_c;
ft.adler32_fold_copy = &adler32_fold_copy_c;
ft.chunkmemset_safe = &chunkmemset_safe_c;
ft.longest_match = &longest_match_c;
ft.longest_match_slow = &longest_match_slow_c;
ft.compare256 = &compare256_c;
+#endif
// Select arch-optimized functions
#endif
// Assign function pointers individually for atomic operation
- FUNCTABLE_ASSIGN(ft, force_init);
- FUNCTABLE_ASSIGN(ft, adler32);
- FUNCTABLE_ASSIGN(ft, adler32_fold_copy);
- FUNCTABLE_ASSIGN(ft, chunkmemset_safe);
- FUNCTABLE_ASSIGN(ft, chunksize);
- FUNCTABLE_ASSIGN(ft, compare256);
- FUNCTABLE_ASSIGN(ft, crc32);
- FUNCTABLE_ASSIGN(ft, crc32_fold);
- FUNCTABLE_ASSIGN(ft, crc32_fold_copy);
- FUNCTABLE_ASSIGN(ft, crc32_fold_final);
- FUNCTABLE_ASSIGN(ft, crc32_fold_reset);
- FUNCTABLE_ASSIGN(ft, inflate_fast);
- FUNCTABLE_ASSIGN(ft, longest_match);
- FUNCTABLE_ASSIGN(ft, longest_match_slow);
- FUNCTABLE_ASSIGN(ft, slide_hash);
+ FUNCTABLE_VERIFY_ASSIGN(ft, force_init);
+ FUNCTABLE_VERIFY_ASSIGN(ft, adler32);
+ FUNCTABLE_VERIFY_ASSIGN(ft, adler32_fold_copy);
+ FUNCTABLE_VERIFY_ASSIGN(ft, chunkmemset_safe);
+ FUNCTABLE_VERIFY_ASSIGN(ft, chunksize);
+ FUNCTABLE_VERIFY_ASSIGN(ft, compare256);
+ FUNCTABLE_VERIFY_ASSIGN(ft, crc32);
+ FUNCTABLE_VERIFY_ASSIGN(ft, crc32_fold);
+ FUNCTABLE_VERIFY_ASSIGN(ft, crc32_fold_copy);
+ FUNCTABLE_VERIFY_ASSIGN(ft, crc32_fold_final);
+ FUNCTABLE_VERIFY_ASSIGN(ft, crc32_fold_reset);
+ FUNCTABLE_VERIFY_ASSIGN(ft, inflate_fast);
+ FUNCTABLE_VERIFY_ASSIGN(ft, longest_match);
+ FUNCTABLE_VERIFY_ASSIGN(ft, longest_match_slow);
+ FUNCTABLE_VERIFY_ASSIGN(ft, slide_hash);
// Memory barrier for weak memory order CPUs
FUNCTABLE_BARRIER();
} \
BENCHMARK_REGISTER_F(adler32_copy, name)->Range(8192, MAX_RANDOM_INTS_SIZE);
+#ifndef NO_C_FALLBACK
BENCHMARK_ADLER32_BASELINE_COPY(c, adler32_c, 1);
+#endif
#ifdef DISABLE_RUNTIME_CPU_DETECTION
BENCHMARK_ADLER32_BASELINE_COPY(native, native_adler32, 1);
} \
BENCHMARK_REGISTER_F(crc32, name)->Arg(1)->Arg(8)->Arg(12)->Arg(16)->Arg(32)->Arg(64)->Arg(512)->Arg(4<<10)->Arg(32<<10)->Arg(256<<10)->Arg(4096<<10);
-#ifndef WITHOUT_CHORBA
-BENCHMARK_CRC32(generic_chorba, crc32_c, 1);
-#else
-BENCHMARK_CRC32(generic, crc32_c, 1);
-#endif
+#ifndef NO_C_FALLBACK
+# ifndef WITHOUT_CHORBA
+ BENCHMARK_CRC32(generic_chorba, crc32_c, 1);
+# else
+ BENCHMARK_CRC32(generic, crc32_c, 1);
+# endif
-BENCHMARK_CRC32(braid, crc32_braid, 1);
+ BENCHMARK_CRC32(braid, crc32_braid, 1);
+#endif
#ifdef DISABLE_RUNTIME_CPU_DETECTION
BENCHMARK_CRC32(native, native_crc32, 1);
} \
BENCHMARK_REGISTER_F(slide_hash, name)->RangeMultiplier(2)->Range(512, MAX_RANDOM_INTS);
+#ifndef NO_C_FALLBACK
BENCHMARK_SLIDEHASH(c, slide_hash_c, 1);
+#endif
#ifdef DISABLE_RUNTIME_CPU_DETECTION
BENCHMARK_SLIDEHASH(native, native_slide_hash, 1);
hash(GetParam(), func); \
}
+#ifndef NO_C_FALLBACK
TEST_ADLER32(c, adler32_c, 1)
+#endif
#ifdef DISABLE_RUNTIME_CPU_DETECTION
TEST_ADLER32(native, native_adler32, 1)
hash(func); \
}
-#ifndef WITHOUT_CHORBA
-TEST_CRC32(generic_chorba, crc32_c, 1)
-#else
-TEST_CRC32(generic, crc32_c, 1)
-#endif
+#ifndef NO_C_FALLBACK
+# ifndef WITHOUT_CHORBA
+ TEST_CRC32(generic_chorba, crc32_c, 1)
+# else
+ TEST_CRC32(generic, crc32_c, 1)
+# endif
-TEST_CRC32(braid, crc32_braid, 1)
+ TEST_CRC32(braid, crc32_braid, 1)
+#endif
#ifdef DISABLE_RUNTIME_CPU_DETECTION
TEST_CRC32(native, native_crc32, 1)