@@ -28,13 +28,13 @@ void inflate_fast_c(PREFIX3(stream) *strm, uint32_t start);
2828uint32_t PREFIX (crc32_braid )(uint32_t crc , const uint8_t * buf , size_t len );
2929
3030uint32_t compare256_c (const uint8_t * src0 , const uint8_t * src1 );
31- #if defined( UNALIGNED_OK ) && BYTE_ORDER == LITTLE_ENDIAN
32- uint32_t compare256_unaligned_16 (const uint8_t * src0 , const uint8_t * src1 );
31+ #if BYTE_ORDER == LITTLE_ENDIAN && OPTIMAL_CMP >= 32
32+ uint32_t compare256_unaligned_16 (const uint8_t * src0 , const uint8_t * src1 );
3333# ifdef HAVE_BUILTIN_CTZ
34- uint32_t compare256_unaligned_32 (const uint8_t * src0 , const uint8_t * src1 );
34+ uint32_t compare256_unaligned_32 (const uint8_t * src0 , const uint8_t * src1 );
3535# endif
36- # if defined(UNALIGNED64_OK ) && defined( HAVE_BUILTIN_CTZLL )
37- uint32_t compare256_unaligned_64 (const uint8_t * src0 , const uint8_t * src1 );
36+ # if defined(HAVE_BUILTIN_CTZLL ) && OPTIMAL_CMP >= 64
37+ uint32_t compare256_unaligned_64 (const uint8_t * src0 , const uint8_t * src1 );
3838# endif
3939#endif
4040
@@ -43,29 +43,24 @@ typedef void (*slide_hash_func)(deflate_state *s);
4343void slide_hash_c (deflate_state * s );
4444
4545uint32_t longest_match_c (deflate_state * const s , Pos cur_match );
46- # if defined(UNALIGNED_OK ) && BYTE_ORDER == LITTLE_ENDIAN
46+ uint32_t longest_match_slow_c (deflate_state * const s , Pos cur_match );
47+ #if BYTE_ORDER == LITTLE_ENDIAN && OPTIMAL_CMP >= 32
4748 uint32_t longest_match_unaligned_16 (deflate_state * const s , Pos cur_match );
48- # ifdef HAVE_BUILTIN_CTZ
49+ uint32_t longest_match_slow_unaligned_16 (deflate_state * const s , Pos cur_match );
50+ # ifdef HAVE_BUILTIN_CTZ
4951 uint32_t longest_match_unaligned_32 (deflate_state * const s , Pos cur_match );
50- # endif
51- # if defined(UNALIGNED64_OK ) && defined(HAVE_BUILTIN_CTZLL )
52- uint32_t longest_match_unaligned_64 (deflate_state * const s , Pos cur_match );
53- # endif
52+ uint32_t longest_match_slow_unaligned_32 (deflate_state * const s , Pos cur_match );
5453# endif
55-
56- uint32_t longest_match_slow_c (deflate_state * const s , Pos cur_match );
57- # if defined(UNALIGNED_OK ) && BYTE_ORDER == LITTLE_ENDIAN
58- uint32_t longest_match_slow_unaligned_16 (deflate_state * const s , Pos cur_match );
59- uint32_t longest_match_slow_unaligned_32 (deflate_state * const s , Pos cur_match );
60- # ifdef UNALIGNED64_OK
54+ # if defined(HAVE_BUILTIN_CTZLL ) && OPTIMAL_CMP >= 64
55+ uint32_t longest_match_unaligned_64 (deflate_state * const s , Pos cur_match );
6156 uint32_t longest_match_slow_unaligned_64 (deflate_state * const s , Pos cur_match );
62- # endif
6357# endif
58+ #endif
6459
6560
6661// Select generic implementation for longest_match, longest_match_slow, compare256 functions.
67- #if defined( UNALIGNED_OK ) && BYTE_ORDER == LITTLE_ENDIAN
68- # if defined(UNALIGNED64_OK ) && defined( HAVE_BUILTIN_CTZLL )
62+ #if BYTE_ORDER == LITTLE_ENDIAN && OPTIMAL_CMP >= 32
63+ # if defined(HAVE_BUILTIN_CTZLL ) && OPTIMAL_CMP >= 64
6964# define longest_match_generic longest_match_unaligned_64
7065# define longest_match_slow_generic longest_match_slow_unaligned_64
7166# define compare256_generic compare256_unaligned_64
0 commit comments