local block_state deflate_fast (deflate_state *s, int flush);
block_state deflate_quick (deflate_state *s, int flush);
local block_state deflate_medium (deflate_state *s, int flush);
-#ifndef FASTEST
local block_state deflate_slow (deflate_state *s, int flush);
-#endif
local block_state deflate_rle (deflate_state *s, int flush);
local block_state deflate_huff (deflate_state *s, int flush);
local void lm_init (deflate_state *s);
compress_func func;
} config;
-#ifdef FASTEST
-local const config configuration_table[2] = {
-/* good lazy nice chain */
-/* 0 */ {0, 0, 0, 0, deflate_stored}, /* store only */
-/* 1 */ {4, 4, 8, 4, deflate_fast}}; /* max speed, no lazy matches */
-#else
local const config configuration_table[10] = {
/* good lazy nice chain */
/* 0 */ {0, 0, 0, 0, deflate_stored}, /* store only */
/* 7 */ {8, 32, 128, 256, deflate_slow},
/* 8 */ {32, 128, 258, 1024, deflate_slow},
/* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* max compression */
-#endif
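/*
 * Illustrative sketch (not part of this change): in stock zlib the row of
 * configuration_table selected by the compression level is copied into the
 * live state by lm_init() and deflateParams(), roughly as below; the func
 * member then picks the per-level deflate body (deflate_stored, deflate_fast,
 * deflate_slow, and in this tree deflate_quick/deflate_medium as well).
 */
local void apply_level_config_sketch(deflate_state *s) {
    s->max_lazy_match   = configuration_table[s->level].max_lazy;
    s->good_match       = configuration_table[s->level].good_length;
    s->nice_match       = configuration_table[s->level].nice_length;
    s->max_chain_length = configuration_table[s->level].max_chain;
}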
/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
* For deflate_fast() (levels <= 3) good is ignored and lazy has a different
 * Insert string str in the dictionary and return the previous head
 * of the hash chain (the most recent string with the same hash key);
 * callers use the returned position as the starting point for the match search.
- * If this file is compiled with -DFASTEST, the compression level is forced
- * to 1, and no hash chains are maintained.
 * IN assertion: all calls to INSERT_STRING are made with consecutive
* input characters and the first MIN_MATCH bytes of str are valid
* (except for the last MIN_MATCH-1 bytes of the input file).
Pos ret;
UPDATE_HASH(s, s->ins_h, str);
-#ifdef FASTEST
- ret = s->head[s->ins_h];
-#else
ret = s->prev[str & s->w_mask] = s->head[s->ins_h];
-#endif
s->head[s->ins_h] = str;
return ret;
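/*
 * Illustrative sketch only (not called by the real code): the position
 * returned by the INSERT_STRING helper above is the start of a chain that a
 * matcher can walk from the newest occurrence of a hash key back through
 * older ones, which is exactly what longest_match() does.  head[] holds the
 * most recent position for each hash, prev[] links a position to the previous
 * one with the same key.  Stock zlib also bounds the walk by strstart -
 * MAX_DIST and by max_chain_length; this sketch only uses the NIL (0)
 * terminator and an explicit limit.
 */
local unsigned count_chain_sketch(deflate_state *s, IPos cur_match, unsigned limit) {
    unsigned n = 0;
    while (cur_match != 0 && n < limit) {           /* NIL (0) ends the chain */
        n++;
        cur_match = s->prev[cur_match & s->w_mask]; /* older position, same hash */
    }
    return n;
}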
strm->zfree = zcfree;
#endif
-#ifdef FASTEST
- if (level != 0) level = 1;
-#else
if (level == Z_DEFAULT_COMPRESSION) level = 6;
-#endif
if (windowBits < 0) { /* suppress zlib wrapper */
wrap = 0;
n = s->lookahead - (MIN_MATCH-1);
do {
UPDATE_HASH(s, s->ins_h, str);
-#ifndef FASTEST
s->prev[str & s->w_mask] = s->head[s->ins_h];
-#endif
s->head[s->ins_h] = (Pos)str;
str++;
} while (--n);
if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
s = strm->state;
-#ifdef FASTEST
- if (level != 0) level = 1;
-#else
if (level == Z_DEFAULT_COMPRESSION) level = 6;
-#endif
if (level < 0 || level > 9 || strategy < 0 || strategy > Z_FIXED) {
return Z_STREAM_ERROR;
}
s->match_length = s->prev_length = MIN_MATCH-1;
s->match_available = 0;
s->ins_h = 0;
-#ifndef FASTEST
#ifdef ASMV
match_init(); /* initialize the asm code */
#endif
-#endif
}
#include "match.c"
#endif /* NOT_TWEAK_COMPILER */
n = wsize;
-#ifndef FASTEST
p = &s->prev[n];
#ifdef NOT_TWEAK_COMPILER
do {
}
}
#endif /* NOT_TWEAK_COMPILER */
-#endif
more += wsize;
}
if (s->strm->avail_in == 0) break;
#endif
while (s->insert) {
UPDATE_HASH(s, s->ins_h, str);
-#ifndef FASTEST
s->prev[str & s->w_mask] = s->head[s->ins_h];
-#endif
s->head[s->ins_h] = (Pos)str;
str++;
s->insert--;
/* Insert new strings in the hash table only if the match length
* is not too large. This saves time but degrades compression.
*/
-#ifndef FASTEST
if (s->match_length <= s->max_insert_length &&
s->lookahead >= MIN_MATCH) {
s->match_length--; /* string at strstart already in table */
} while (--s->match_length != 0);
s->strstart++;
} else
-#endif
{
s->strstart += s->match_length;
s->match_length = 0;
#include "deflate_medium.c"
#endif
-#ifndef FASTEST
/* ===========================================================================
* Same as above, but achieves better compression. We use a lazy
* evaluation for matches: a match is finally adopted only if there is
FLUSH_BLOCK(s, 0);
return block_done;
}
-#endif /* FASTEST */
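/*
 * Lazy-evaluation sketch (illustrative, simplified from deflate_slow() above):
 * the match found at the previous byte is only emitted if the match starting
 * at the current byte is not longer.  Stock zlib additionally applies the
 * TOO_FAR and max_lazy filters before trying the second match.
 *
 *     match_length = longest_match(s, hash_head);        // match at strstart
 *     if (prev_length >= MIN_MATCH && match_length <= prev_length) {
 *         // the previous match wins: emit (dist, prev_length), skip its bytes
 *     } else {
 *         // emit window[strstart-1] as a literal; the new match becomes
 *         // the candidate for the next iteration
 *     }
 */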
/* ===========================================================================
* For Z_RLE, simply look for runs of bytes, generate matches only of distance
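/*
 * Illustrative sketch of the Z_RLE strategy (simplified from stock zlib's
 * deflate_rle(); not called by the real code).  A match is always at distance
 * one: count how many upcoming bytes equal the byte just before strstart,
 * capped at MAX_MATCH and at the available lookahead.  Assumes strstart > 0.
 */
local uInt rle_run_length_sketch(deflate_state *s) {
    Bytef *scan = s->window + s->strstart;  /* first byte of the would-be match */
    Bytef prev = scan[-1];                  /* byte the run must repeat */
    uInt max = MAX_MATCH < s->lookahead ? (uInt)MAX_MATCH : s->lookahead;
    uInt len = 0;
    while (len < max && scan[len] == prev)
        len++;
    return len >= MIN_MATCH ? len : 0;      /* shorter runs are sent as literals */
}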
#include "deflate.h"
-#ifdef FASTEST
-#define longest_match fastest_longest_match
-#elif (defined(UNALIGNED_OK) && MAX_MATCH == 258)
-#define longest_match std2_longest_match
+#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
+# define longest_match std2_longest_match
#else
-#define longest_match std1_longest_match
+# define longest_match std1_longest_match
#endif
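/*
 * Note on the dispatch above: std2_longest_match is the variant that compares
 * the window two bytes at a time through 16-bit loads, which is why it is
 * gated on UNALIGNED_OK (and on MAX_MATCH == 258, which its unrolled compare
 * loop assumes); std1_longest_match is the portable byte-at-a-time fallback.
 * With the FASTEST-only fastest_longest_match removed below, these two are
 * the only implementations left behind the longest_match name.
 */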
/*
return best_len;
return s->lookahead;
}
-
-/*
- * FASTEST-only longest_match
- *
- */
-local unsigned fastest_longest_match(deflate_state *z_const s, IPos cur_match)
-{
- unsigned char *scan, *match, *strend;
- int len;
-
- /*
- * The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple
- * of 16. It is easy to get rid of this optimization if necessary
- */
- Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");
-
- Assert((unsigned long)s->strstart <= s->window_size - MIN_LOOKAHEAD,
- "need lookahead");
-
- Assert(cur_match < s->strstart, "no future");
-
- match = s->window + cur_match;
- scan = s->window + s->strstart;
- strend = s->window + s->strstart + MAX_MATCH;
-
- if (*match++ != *scan++ || *match++ != *scan++)
- return MIN_MATCH-1;
-
- /*
- * The check at best_len-1 can be removed because it will be made
- * again later. (This heuristic is not always a win.) It is not
- * necessary to compare scan[2] and match[2] since they are always
- * equal when the other bytes match, given that the hash keys are equal
- * and that HASH_BITS >= 8.
- */
- Assert(*scan == *match, "match[2]?");
-
- do {
- } while (*++scan == *++match && *++scan == *++match &&
- *++scan == *++match && *++scan == *++match &&
- *++scan == *++match && *++scan == *++match &&
- *++scan == *++match && *++scan == *++match &&
- scan < strend);
-
- Assert(scan <= s->window+(unsigned int)(s->window_size-1), "wild scan");
-
- len = MAX_MATCH - (long)(strend - scan);
- if (len < MIN_MATCH)
- return MIN_MATCH-1;
-
- s->match_start = cur_match;
- return len <= s->lookahead ? len : s->lookahead;
-}