mirror of https://github.com/madler/zlib
parent 3e4aa45834
commit 84c6716a48

compress.c

@@ -19,7 +19,7 @@
     memory, Z_BUF_ERROR if there was not enough room in the output buffer,
     Z_STREAM_ERROR if the level parameter is invalid.
 */
-int ZEXPORT compress2 (dest, destLen, source, sourceLen, level)
+int ZEXPORT compress2(dest, destLen, source, sourceLen, level)
     Bytef *dest;
     uLongf *destLen;
     const Bytef *source;
@@ -65,7 +65,7 @@ int ZEXPORT compress2 (dest, destLen, source, sourceLen, level)
 /* ===========================================================================
  */
-int ZEXPORT compress (dest, destLen, source, sourceLen)
+int ZEXPORT compress(dest, destLen, source, sourceLen)
     Bytef *dest;
     uLongf *destLen;
     const Bytef *source;
@@ -78,7 +78,7 @@ int ZEXPORT compress (dest, destLen, source, sourceLen)
     If the default memLevel or windowBits for deflateInit() is changed, then
     this function needs to be updated.
  */
-uLong ZEXPORT compressBound (sourceLen)
+uLong ZEXPORT compressBound(sourceLen)
     uLong sourceLen;
 {
     return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
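
The compressBound() hunk above spells out the worst-case output formula. Below is a minimal usage sketch, not part of this commit, that sizes a destination with compressBound() before calling compress2(); the buffer size and sample string are illustrative.

#include <stdio.h>
#include <zlib.h>

int main(void) {
    static const Bytef src[] = "an illustrative, mildly repetitive, repetitive input";
    uLong srcLen = (uLong)sizeof(src);
    Bytef dst[1024];                        /* demo buffer, not sized exactly */
    uLongf dstLen = compressBound(srcLen);  /* worst-case output size */

    if (dstLen > sizeof(dst))
        return 1;                           /* demo buffer too small */
    if (compress2(dst, &dstLen, src, srcLen, Z_BEST_COMPRESSION) != Z_OK)
        return 1;
    printf("%lu bytes in, %lu bytes out\n",
           (unsigned long)srcLen, (unsigned long)dstLen);
    return 0;
}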

deflate.c

@@ -160,7 +160,7 @@ local const config configuration_table[10] = {
 * characters, so that a running hash key can be computed from the previous
 * key instead of complete recalculation each time.
 */
-#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
+#define UPDATE_HASH(s,h,c) (h = (((h) << s->hash_shift) ^ (c)) & s->hash_mask)


 /* ===========================================================================
@@ -191,9 +191,9 @@ local const config configuration_table[10] = {
 */
 #define CLEAR_HASH(s) \
     do { \
-        s->head[s->hash_size-1] = NIL; \
+        s->head[s->hash_size - 1] = NIL; \
         zmemzero((Bytef *)s->head, \
-                 (unsigned)(s->hash_size-1)*sizeof(*s->head)); \
+                 (unsigned)(s->hash_size - 1)*sizeof(*s->head)); \
     } while (0)

 /* ===========================================================================
@@ -314,7 +314,7 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
     s->hash_bits = (uInt)memLevel + 7;
     s->hash_size = 1 << s->hash_bits;
     s->hash_mask = s->hash_size - 1;
-    s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);
+    s->hash_shift = ((s->hash_bits + MIN_MATCH-1) / MIN_MATCH);

     s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
     s->prev = (Posf *) ZALLOC(strm, s->w_size, sizeof(Pos));
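
The three hunks above touch the rolling hash: hash_bits is memLevel + 7 and hash_shift is (hash_bits + MIN_MATCH - 1) / MIN_MATCH, so a byte drops out of the key after MIN_MATCH updates. A standalone sketch of that property, assuming the default memLevel of 8; the constants and test string are illustrative, not taken from this diff.

#include <stdio.h>

/* Same shape as the UPDATE_HASH macro above, with assumed defaults:
 * memLevel 8 -> hash_bits = 15, MIN_MATCH = 3,
 * hash_shift = (15 + 3 - 1) / 3 = 5, hash_mask = (1 << 15) - 1. */
#define HASH_BITS  15
#define HASH_SHIFT 5
#define HASH_MASK  ((1u << HASH_BITS) - 1)
#define UPDATE(h, c) ((h) = (((h) << HASH_SHIFT) ^ (unsigned)(c)) & HASH_MASK)

int main(void) {
    const unsigned char window[] = "abcabc";
    unsigned h = 0, h_abc;

    /* Key for window[0..2]: each byte influences the key for exactly
     * MIN_MATCH updates before it is shifted past hash_mask. */
    UPDATE(h, window[0]);
    UPDATE(h, window[1]);
    UPDATE(h, window[2]);
    h_abc = h;

    /* Rolling forward over the same three-byte string "abc" reproduces
     * the same key without recomputing it from scratch. */
    UPDATE(h, window[3]);
    UPDATE(h, window[4]);
    UPDATE(h, window[5]);

    printf("%u %u\n", h_abc, h);    /* prints the same key twice */
    return 0;
}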

@@ -340,11 +340,11 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
     * sym_buf value to read moves forward three bytes. From that symbol, up to
     * 31 bits are written to pending_buf. The closest the written pending_buf
     * bits gets to the next sym_buf symbol to read is just before the last
-    * code is written. At that time, 31*(n-2) bits have been written, just
-    * after 24*(n-2) bits have been consumed from sym_buf. sym_buf starts at
-    * 8*n bits into pending_buf. (Note that the symbol buffer fills when n-1
+    * code is written. At that time, 31*(n - 2) bits have been written, just
+    * after 24*(n - 2) bits have been consumed from sym_buf. sym_buf starts at
+    * 8*n bits into pending_buf. (Note that the symbol buffer fills when n - 1
     * symbols are written.) The closest the writing gets to what is unread is
-    * then n+14 bits. Here n is lit_bufsize, which is 16384 by default, and
+    * then n + 14 bits. Here n is lit_bufsize, which is 16384 by default, and
     * can range from 128 to 32768.
     *
     * Therefore, at a minimum, there are 142 bits of space between what is
@@ -390,7 +390,7 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
 /* =========================================================================
 * Check for a valid deflate stream state. Return 0 if ok, 1 if not.
 */
-local int deflateStateCheck (strm)
+local int deflateStateCheck(strm)
     z_streamp strm;
 {
     deflate_state *s;
@@ -413,7 +413,7 @@ local int deflateStateCheck (strm)
 }

 /* ========================================================================= */
-int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength)
+int ZEXPORT deflateSetDictionary(strm, dictionary, dictLength)
     z_streamp strm;
     const Bytef *dictionary;
     uInt dictLength;
@@ -482,7 +482,7 @@ int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength)
 }

 /* ========================================================================= */
-int ZEXPORT deflateGetDictionary (strm, dictionary, dictLength)
+int ZEXPORT deflateGetDictionary(strm, dictionary, dictLength)
     z_streamp strm;
     Bytef *dictionary;
     uInt *dictLength;
@@ -504,7 +504,7 @@ int ZEXPORT deflateGetDictionary (strm, dictionary, dictLength)
 }

 /* ========================================================================= */
-int ZEXPORT deflateResetKeep (strm)
+int ZEXPORT deflateResetKeep(strm)
     z_streamp strm;
 {
     deflate_state *s;
@@ -542,7 +542,7 @@ int ZEXPORT deflateResetKeep (strm)
 }

 /* ========================================================================= */
-int ZEXPORT deflateReset (strm)
+int ZEXPORT deflateReset(strm)
     z_streamp strm;
 {
     int ret;
@@ -554,7 +554,7 @@ int ZEXPORT deflateReset (strm)
 }

 /* ========================================================================= */
-int ZEXPORT deflateSetHeader (strm, head)
+int ZEXPORT deflateSetHeader(strm, head)
     z_streamp strm;
     gz_headerp head;
 {
@@ -565,7 +565,7 @@ int ZEXPORT deflateSetHeader (strm, head)
 }

 /* ========================================================================= */
-int ZEXPORT deflatePending (strm, pending, bits)
+int ZEXPORT deflatePending(strm, pending, bits)
     unsigned *pending;
     int *bits;
     z_streamp strm;
@@ -579,7 +579,7 @@ int ZEXPORT deflatePending (strm, pending, bits)
 }

 /* ========================================================================= */
-int ZEXPORT deflatePrime (strm, bits, value)
+int ZEXPORT deflatePrime(strm, bits, value)
     z_streamp strm;
     int bits;
     int value;
@@ -769,7 +769,7 @@ uLong ZEXPORT deflateBound(strm, sourceLen)
 * IN assertion: the stream state is correct and there is enough room in
 * pending_buf.
 */
-local void putShortMSB (s, b)
+local void putShortMSB(s, b)
     deflate_state *s;
     uInt b;
 {
@@ -816,7 +816,7 @@ local void flush_pending(strm)
 } while (0)

 /* ========================================================================= */
-int ZEXPORT deflate (strm, flush)
+int ZEXPORT deflate(strm, flush)
     z_streamp strm;
     int flush;
 {
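
For context, a hedged sketch of driving the deflate() entry point above in a single call; it assumes the whole input and output fit in memory, and the buffer size and sample text are illustrative. A streaming caller would loop on deflate() instead of relying on one Z_FINISH call.

#include <stdio.h>
#include <string.h>
#include <zlib.h>

int main(void) {
    static char src[] = "a one-shot deflate example, small enough for one call";
    Bytef out[256];              /* comfortably above deflateBound() here */
    z_stream strm;
    int ret;

    strm.zalloc = Z_NULL;        /* use the default allocators */
    strm.zfree = Z_NULL;
    strm.opaque = Z_NULL;
    if (deflateInit(&strm, Z_DEFAULT_COMPRESSION) != Z_OK)
        return 1;

    strm.next_in = (Bytef *)src;
    strm.avail_in = (uInt)strlen(src) + 1;
    strm.next_out = out;
    strm.avail_out = (uInt)sizeof(out);

    ret = deflate(&strm, Z_FINISH);   /* everything fits, so one call */
    printf("deflate() returned %d, wrote %lu bytes\n",
           ret, (unsigned long)strm.total_out);

    deflateEnd(&strm);
    return ret == Z_STREAM_END ? 0 : 1;
}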

@@ -871,7 +871,7 @@ int ZEXPORT deflate (strm, flush)
        s->status = BUSY_STATE;
    if (s->status == INIT_STATE) {
        /* zlib header */
-       uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
+       uInt header = (Z_DEFLATED + ((s->w_bits - 8) << 4)) << 8;
        uInt level_flags;

        if (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2)
@@ -1131,7 +1131,7 @@ int ZEXPORT deflate (strm, flush)
 }

 /* ========================================================================= */
-int ZEXPORT deflateEnd (strm)
+int ZEXPORT deflateEnd(strm)
     z_streamp strm;
 {
     int status;
@@ -1157,7 +1157,7 @@ int ZEXPORT deflateEnd (strm)
 * To simplify the source, this is not supported for 16-bit MSDOS (which
 * doesn't have enough memory anyway to duplicate compression states).
 */
-int ZEXPORT deflateCopy (dest, source)
+int ZEXPORT deflateCopy(dest, source)
     z_streamp dest;
     z_streamp source;
 {
@@ -1246,7 +1246,7 @@ local unsigned read_buf(strm, buf, size)
 /* ===========================================================================
 * Initialize the "longest match" routines for a new zlib stream
 */
-local void lm_init (s)
+local void lm_init(s)
     deflate_state *s;
 {
     s->window_size = (ulg)2L*s->w_size;
@@ -1312,10 +1312,10 @@ local uInt longest_match(s, cur_match)
     */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    register ush scan_start = *(ushf*)scan;
-   register ush scan_end = *(ushf*)(scan+best_len-1);
+   register ush scan_end = *(ushf*)(scan + best_len - 1);
 #else
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;
-   register Byte scan_end1 = scan[best_len-1];
+   register Byte scan_end1 = scan[best_len - 1];
    register Byte scan_end = scan[best_len];
 #endif

@@ -1333,7 +1333,8 @@ local uInt longest_match(s, cur_match)
     */
    if ((uInt)nice_match > s->lookahead) nice_match = (int)s->lookahead;

-   Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
+   Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
+          "need lookahead");

    do {
        Assert(cur_match < s->strstart, "no future");
@@ -1351,43 +1352,44 @@ local uInt longest_match(s, cur_match)
        /* This code assumes sizeof(unsigned short) == 2. Do not use
         * UNALIGNED_OK if your compiler uses a different size.
         */
-       if (*(ushf*)(match+best_len-1) != scan_end ||
+       if (*(ushf*)(match + best_len - 1) != scan_end ||
            *(ushf*)match != scan_start) continue;

        /* It is not necessary to compare scan[2] and match[2] since they are
         * always equal when the other bytes match, given that the hash keys
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
-        * strstart+3, +5, ... up to strstart+257. We check for insufficient
+        * strstart + 3, + 5, up to strstart + 257. We check for insufficient
         * lookahead only every 4th comparison; the 128th check will be made
-        * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
+        * at strstart + 257. If MAX_MATCH-2 is not a multiple of 8, it is
         * necessary to put more guard bytes at the end of the window, or
         * to check more often for insufficient lookahead.
         */
        Assert(scan[2] == match[2], "scan[2]?");
        scan++, match++;
        do {
-       } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
-                *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
-                *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
-                *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
+       } while (*(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
+                *(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
+                *(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
+                *(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
                 scan < strend);
        /* The funny "do {}" generates better code on most compilers */

-       /* Here, scan <= window+strstart+257 */
-       Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
+       /* Here, scan <= window + strstart + 257 */
+       Assert(scan <= s->window + (unsigned)(s->window_size - 1),
+              "wild scan");
        if (*scan == *match) scan++;

-       len = (MAX_MATCH - 1) - (int)(strend-scan);
+       len = (MAX_MATCH - 1) - (int)(strend - scan);
        scan = strend - (MAX_MATCH-1);

 #else /* UNALIGNED_OK */

-       if (match[best_len] != scan_end ||
-           match[best_len-1] != scan_end1 ||
-           *match != *scan ||
-           *++match != scan[1]) continue;
+       if (match[best_len] != scan_end ||
+           match[best_len - 1] != scan_end1 ||
+           *match != *scan ||
+           *++match != scan[1]) continue;

-       /* The check at best_len-1 can be removed because it will be made
+       /* The check at best_len - 1 can be removed because it will be made
         * again later. (This heuristic is not always a win.)
         * It is not necessary to compare scan[2] and match[2] since they
         * are always equal when the other bytes match, given that
@@ -1397,7 +1399,7 @@ local uInt longest_match(s, cur_match)
        Assert(*scan == *match, "match[2]?");

        /* We check for insufficient lookahead only every 8th comparison;
-        * the 256th check will be made at strstart+258.
+        * the 256th check will be made at strstart + 258.
         */
        do {
        } while (*++scan == *++match && *++scan == *++match &&
@@ -1406,7 +1408,8 @@ local uInt longest_match(s, cur_match)
                 *++scan == *++match && *++scan == *++match &&
                 scan < strend);

-       Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
+       Assert(scan <= s->window + (unsigned)(s->window_size - 1),
+              "wild scan");

        len = MAX_MATCH - (int)(strend - scan);
        scan = strend - MAX_MATCH;
@@ -1418,9 +1421,9 @@ local uInt longest_match(s, cur_match)
            best_len = len;
            if (len >= nice_match) break;
 #ifdef UNALIGNED_OK
-           scan_end = *(ushf*)(scan+best_len-1);
+           scan_end = *(ushf*)(scan + best_len - 1);
 #else
-           scan_end1 = scan[best_len-1];
+           scan_end1 = scan[best_len - 1];
            scan_end = scan[best_len];
 #endif
        }
@@ -1451,7 +1454,8 @@ local uInt longest_match(s, cur_match)
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

-   Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
+   Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
+          "need lookahead");

    Assert(cur_match < s->strstart, "no future");

@@ -1461,7 +1465,7 @@ local uInt longest_match(s, cur_match)
     */
    if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1;

-   /* The check at best_len-1 can be removed because it will be made
+   /* The check at best_len - 1 can be removed because it will be made
     * again later. (This heuristic is not always a win.)
     * It is not necessary to compare scan[2] and match[2] since they
     * are always equal when the other bytes match, given that
@@ -1471,7 +1475,7 @@ local uInt longest_match(s, cur_match)
    Assert(*scan == *match, "match[2]?");

    /* We check for insufficient lookahead only every 8th comparison;
-    * the 256th check will be made at strstart+258.
+    * the 256th check will be made at strstart + 258.
     */
    do {
    } while (*++scan == *++match && *++scan == *++match &&
@@ -1480,7 +1484,7 @@ local uInt longest_match(s, cur_match)
             *++scan == *++match && *++scan == *++match &&
             scan < strend);

-   Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
+   Assert(scan <= s->window + (unsigned)(s->window_size - 1), "wild scan");

    len = MAX_MATCH - (int)(strend - scan);

@@ -1516,7 +1520,7 @@ local void check_match(s, start, match, length)
        z_error("invalid match");
    }
    if (z_verbose > 1) {
-       fprintf(stderr,"\\[%d,%d]", start-match, length);
+       fprintf(stderr,"\\[%d,%d]", start - match, length);
        do { putc(s->window[start++], stderr); } while (--length != 0);
    }
 }
@@ -1562,9 +1566,9 @@ local void fill_window(s)
        /* If the window is almost full and there is insufficient lookahead,
         * move the upper half to the lower one to make room in the upper half.
         */
-       if (s->strstart >= wsize+MAX_DIST(s)) {
+       if (s->strstart >= wsize + MAX_DIST(s)) {

-           zmemcpy(s->window, s->window+wsize, (unsigned)wsize - more);
+           zmemcpy(s->window, s->window + wsize, (unsigned)wsize - more);
            s->match_start -= wsize;
            s->strstart -= wsize; /* we now have strstart >= MAX_DIST */
            s->block_start -= (long) wsize;
@@ -1905,7 +1909,7 @@ local block_state deflate_fast(s, flush)
            if (s->lookahead == 0) break; /* flush the current block */
        }

-       /* Insert the string window[strstart .. strstart+2] in the
+       /* Insert the string window[strstart .. strstart + 2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        hash_head = NIL;
@@ -1953,7 +1957,7 @@ local block_state deflate_fast(s, flush)
                s->strstart += s->match_length;
                s->match_length = 0;
                s->ins_h = s->window[s->strstart];
-               UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
+               UPDATE_HASH(s, s->ins_h, s->window[s->strstart + 1]);
 #if MIN_MATCH != 3
                Call UPDATE_HASH() MIN_MATCH-3 more times
 #endif
@@ -1964,7 +1968,7 @@ local block_state deflate_fast(s, flush)
        } else {
            /* No match, output a literal byte */
            Tracevv((stderr,"%c", s->window[s->strstart]));
-           _tr_tally_lit (s, s->window[s->strstart], bflush);
+           _tr_tally_lit(s, s->window[s->strstart], bflush);
            s->lookahead--;
            s->strstart++;
        }
@@ -2008,7 +2012,7 @@ local block_state deflate_slow(s, flush)
            if (s->lookahead == 0) break; /* flush the current block */
        }

-       /* Insert the string window[strstart .. strstart+2] in the
+       /* Insert the string window[strstart .. strstart + 2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        hash_head = NIL;
@@ -2050,17 +2054,17 @@ local block_state deflate_slow(s, flush)
            uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
            /* Do not insert strings in hash table beyond this. */

-           check_match(s, s->strstart-1, s->prev_match, s->prev_length);
+           check_match(s, s->strstart - 1, s->prev_match, s->prev_length);

-           _tr_tally_dist(s, s->strstart -1 - s->prev_match,
+           _tr_tally_dist(s, s->strstart - 1 - s->prev_match,
                           s->prev_length - MIN_MATCH, bflush);

            /* Insert in hash table all strings up to the end of the match.
-            * strstart-1 and strstart are already inserted. If there is not
+            * strstart - 1 and strstart are already inserted. If there is not
             * enough lookahead, the last two strings are not inserted in
             * the hash table.
             */
-           s->lookahead -= s->prev_length-1;
+           s->lookahead -= s->prev_length - 1;
            s->prev_length -= 2;
            do {
                if (++s->strstart <= max_insert) {
@@ -2078,8 +2082,8 @@ local block_state deflate_slow(s, flush)
             * single literal. If there was a match but the current match
             * is longer, truncate the previous match to a single literal.
             */
-           Tracevv((stderr,"%c", s->window[s->strstart-1]));
-           _tr_tally_lit(s, s->window[s->strstart-1], bflush);
+           Tracevv((stderr,"%c", s->window[s->strstart - 1]));
+           _tr_tally_lit(s, s->window[s->strstart - 1], bflush);
            if (bflush) {
                FLUSH_BLOCK_ONLY(s, 0);
            }
@@ -2097,8 +2101,8 @@ local block_state deflate_slow(s, flush)
    }
    Assert (flush != Z_NO_FLUSH, "no flush?");
    if (s->match_available) {
-       Tracevv((stderr,"%c", s->window[s->strstart-1]));
-       _tr_tally_lit(s, s->window[s->strstart-1], bflush);
+       Tracevv((stderr,"%c", s->window[s->strstart - 1]));
+       _tr_tally_lit(s, s->window[s->strstart - 1], bflush);
        s->match_available = 0;
    }
    s->insert = s->strstart < MIN_MATCH-1 ? s->strstart : MIN_MATCH-1;
@@ -2155,7 +2159,8 @@ local block_state deflate_rle(s, flush)
                if (s->match_length > s->lookahead)
                    s->match_length = s->lookahead;
            }
-           Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan");
+           Assert(scan <= s->window + (uInt)(s->window_size - 1),
+                  "wild scan");
        }

        /* Emit match if have run of MIN_MATCH or longer, else emit literal */
@@ -2170,7 +2175,7 @@ local block_state deflate_rle(s, flush)
        } else {
            /* No match, output a literal byte */
            Tracevv((stderr,"%c", s->window[s->strstart]));
-           _tr_tally_lit (s, s->window[s->strstart], bflush);
+           _tr_tally_lit(s, s->window[s->strstart], bflush);
            s->lookahead--;
            s->strstart++;
        }
@@ -2210,7 +2215,7 @@ local block_state deflate_huff(s, flush)
            /* Output a literal byte */
            s->match_length = 0;
            Tracevv((stderr,"%c", s->window[s->strstart]));
-           _tr_tally_lit (s, s->window[s->strstart], bflush);
+           _tr_tally_lit(s, s->window[s->strstart], bflush);
            s->lookahead--;
            s->strstart++;
            if (bflush) FLUSH_BLOCK(s, 0);

gzlib.c

@@ -30,7 +30,7 @@ local gzFile gz_open OF((const void *, int, const char *));

    The gz_strwinerror function does not change the current setting of
    GetLastError. */
-char ZLIB_INTERNAL *gz_strwinerror (error)
+char ZLIB_INTERNAL *gz_strwinerror(error)
     DWORD error;
 {
     static char buf[1024];
@@ -474,7 +474,7 @@ int ZEXPORTVA gzprintf(gzFile file, const char *format, ...)
 #else /* !STDC && !Z_HAVE_STDARG_H */

 /* -- see zlib.h -- */
-int ZEXPORTVA gzprintf (file, format, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
+int ZEXPORTVA gzprintf(file, format, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
                        a11, a12, a13, a14, a15, a16, a17, a18, a19, a20)
     gzFile file;
     const char *format;

trees.c

@@ -193,7 +193,7 @@ local void send_bits(s, value, length)
    s->bits_sent += (ulg)length;

    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
-    * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
+    * (16 - bi_valid) bits from value, leaving (width - (16 - bi_valid))
     * unused bits in value.
     */
    if (s->bi_valid > (int)Buf_size - length) {
@@ -256,7 +256,7 @@ local void tr_static_init()
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
-       for (n = 0; n < (1<<extra_lbits[code]); n++) {
+       for (n = 0; n < (1 << extra_lbits[code]); n++) {
            _length_code[length++] = (uch)code;
        }
    }
@@ -265,13 +265,13 @@ local void tr_static_init()
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
-   _length_code[length-1] = (uch)code;
+   _length_code[length - 1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
-       for (n = 0; n < (1<<extra_dbits[code]); n++) {
+       for (n = 0; n < (1 << extra_dbits[code]); n++) {
            _dist_code[dist++] = (uch)code;
        }
    }
@@ -279,11 +279,11 @@ local void tr_static_init()
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
-       for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
+       for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
            _dist_code[256 + dist++] = (uch)code;
        }
    }
-   Assert (dist == 256, "tr_static_init: 256+dist != 512");
+   Assert (dist == 256, "tr_static_init: 256 + dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
@@ -321,7 +321,7 @@ local void tr_static_init()

 #  define SEPARATOR(i, last, width) \
      ((i) == (last)? "\n};\n\n" : \
-      ((i) % (width) == (width)-1 ? ",\n" : ", "))
+      ((i) % (width) == (width) - 1 ? ",\n" : ", "))

 void gen_trees_header()
 {
@@ -458,7 +458,7 @@ local void pqdownheap(s, tree, k)
    while (j <= s->heap_len) {
        /* Set j to the smallest of the two sons: */
        if (j < s->heap_len &&
-           smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
+           smaller(tree, s->heap[j + 1], s->heap[j], s->depth)) {
            j++;
        }
        /* Exit if v is smaller than both sons */
@@ -507,7 +507,7 @@ local void gen_bitlen(s, desc)
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

-   for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
+   for (h = s->heap_max + 1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
@@ -518,7 +518,7 @@ local void gen_bitlen(s, desc)

        s->bl_count[bits]++;
        xbits = 0;
-       if (n >= base) xbits = extra[n-base];
+       if (n >= base) xbits = extra[n - base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (unsigned)(bits + xbits);
        if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits);
@@ -530,10 +530,10 @@ local void gen_bitlen(s, desc)

    /* Find the first bit length which could increase: */
    do {
-       bits = max_length-1;
+       bits = max_length - 1;
        while (s->bl_count[bits] == 0) bits--;
-       s->bl_count[bits]--; /* move one leaf down the tree */
-       s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
+       s->bl_count[bits]--; /* move one leaf down the tree */
+       s->bl_count[bits + 1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
@@ -569,7 +569,7 @@ local void gen_bitlen(s, desc)
 * OUT assertion: the field code is set for all tree elements of non
 * zero code length.
 */
-local void gen_codes (tree, max_code, bl_count)
+local void gen_codes(tree, max_code, bl_count)
     ct_data *tree; /* the tree to decorate */
     int max_code; /* largest code with non zero frequency */
     ushf *bl_count; /* number of codes at each bit length */
@@ -583,13 +583,13 @@ local void gen_codes (tree, max_code, bl_count)
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
-       code = (code + bl_count[bits-1]) << 1;
+       code = (code + bl_count[bits - 1]) << 1;
        next_code[bits] = (ush)code;
    }
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
-   Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
+   Assert (code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

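
The gen_codes() hunk above is the canonical Huffman construction from RFC 1951: codes of each bit length start at next_code[bits] = (code + bl_count[bits - 1]) << 1, and the Assert checks that the last code comes out all ones. A small worked sketch with made-up bit-length counts, not data from this diff:

#include <stdio.h>

#define MAX_BITS 4   /* small illustrative maximum, not zlib's 15 */

int main(void) {
    /* Counts of codes per bit length for lengths {2,3,3,3,3,3,4,4}. */
    unsigned bl_count[MAX_BITS + 1] = {0, 0, 1, 5, 2};
    unsigned next_code[MAX_BITS + 1];
    unsigned code = 0;
    int bits;

    /* Same recurrence as the gen_codes() hunk above. */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        code = (code + bl_count[bits - 1]) << 1;
        next_code[bits] = code;
        printf("length %d codes start at %u\n", bits, next_code[bits]);
    }

    /* Mirrors the Assert: the last code must be all ones. */
    if (code + bl_count[MAX_BITS] - 1 != (1u << MAX_BITS) - 1)
        return 1;
    return 0;
}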
@@ -600,7 +600,7 @@ local void gen_codes (tree, max_code, bl_count)
        tree[n].Code = (ush)bi_reverse(next_code[len]++, len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
-           n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
+           n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len] - 1));
    }
 }

@@ -624,7 +624,7 @@ local void build_tree(s, desc)
    int node; /* new node being created */

    /* Construct the initial heap, with least frequent element in
-    * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
+    * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n + 1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;
@@ -652,7 +652,7 @@ local void build_tree(s, desc)
    }
    desc->max_code = max_code;

-   /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
+   /* The elements heap[heap_len/2 + 1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
@@ -700,7 +700,7 @@ local void build_tree(s, desc)
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree.
 */
-local void scan_tree (s, tree, max_code)
+local void scan_tree(s, tree, max_code)
     deflate_state *s;
     ct_data *tree; /* the tree to be scanned */
     int max_code; /* and its largest code of non zero frequency */
@@ -714,10 +714,10 @@ local void scan_tree (s, tree, max_code)
    int min_count = 4; /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
-   tree[max_code+1].Len = (ush)0xffff; /* guard */
+   tree[max_code + 1].Len = (ush)0xffff; /* guard */

    for (n = 0; n <= max_code; n++) {
-       curlen = nextlen; nextlen = tree[n+1].Len;
+       curlen = nextlen; nextlen = tree[n + 1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
@@ -745,7 +745,7 @@ local void scan_tree (s, tree, max_code)
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree.
 */
-local void send_tree (s, tree, max_code)
+local void send_tree(s, tree, max_code)
     deflate_state *s;
     ct_data *tree; /* the tree to be scanned */
     int max_code; /* and its largest code of non zero frequency */
@@ -758,11 +758,11 @@ local void send_tree (s, tree, max_code)
    int max_count = 7; /* max repeat count */
    int min_count = 4; /* min repeat count */

-   /* tree[max_code+1].Len = -1; */ /* guard already set */
+   /* tree[max_code + 1].Len = -1; */ /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
-       curlen = nextlen; nextlen = tree[n+1].Len;
+       curlen = nextlen; nextlen = tree[n + 1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
@@ -773,13 +773,13 @@ local void send_tree (s, tree, max_code)
            send_code(s, curlen, s->bl_tree); count--;
        }
        Assert(count >= 3 && count <= 6, " 3_6?");
-       send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
+       send_code(s, REP_3_6, s->bl_tree); send_bits(s, count - 3, 2);

    } else if (count <= 10) {
-       send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
+       send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count - 3, 3);

    } else {
-       send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
+       send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count - 11, 7);
    }
    count = 0; prevlen = curlen;
    if (nextlen == 0) {
@@ -807,8 +807,8 @@ local int build_bl_tree(s)

    /* Build the bit length tree: */
    build_tree(s, (tree_desc *)(&(s->bl_desc)));
-   /* opt_len now includes the length of the tree representations, except
-    * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
+   /* opt_len now includes the length of the tree representations, except the
+    * lengths of the bit lengths codes and the 5 + 5 + 4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
@@ -819,7 +819,7 @@ local int build_bl_tree(s)
        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
    }
    /* Update opt_len to include the bit length tree and counts */
-   s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4;
+   s->opt_len += 3*((ulg)max_blindex + 1) + 5 + 5 + 4;
    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
            s->opt_len, s->static_len));

@@ -841,19 +841,19 @@ local void send_all_trees(s, lcodes, dcodes, blcodes)
    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
            "too many codes");
    Tracev((stderr, "\nbl counts: "));
-   send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
-   send_bits(s, dcodes-1, 5);
-   send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */
+   send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */
+   send_bits(s, dcodes - 1, 5);
+   send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */
    for (rank = 0; rank < blcodes; rank++) {
        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
    }
    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));

-   send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
+   send_tree(s, (ct_data *)s->dyn_ltree, lcodes - 1); /* literal tree */
    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));

-   send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
+   send_tree(s, (ct_data *)s->dyn_dtree, dcodes - 1); /* distance tree */
    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
 }

@@ -866,7 +866,7 @@ void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
     ulg stored_len; /* length of input block */
     int last; /* one if this is the last block for a file */
 {
-   send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */
+   send_bits(s, (STORED_BLOCK<<1) + last, 3); /* send block type */
    bi_windup(s); /* align on byte boundary */
    put_short(s, (ush)stored_len);
    put_short(s, (ush)~stored_len);
@@ -877,7 +877,7 @@ void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
    s->compressed_len += (stored_len + 4) << 3;
    s->bits_sent += 2*16;
-   s->bits_sent += stored_len<<3;
+   s->bits_sent += stored_len << 3;
 #endif
 }

@@ -943,8 +943,8 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
    max_blindex = build_bl_tree(s);

    /* Determine the best encoding. Compute the block lengths in bytes. */
-   opt_lenb = (s->opt_len+3+7)>>3;
-   static_lenb = (s->static_len+3+7)>>3;
+   opt_lenb = (s->opt_len + 3 + 7) >> 3;
+   static_lenb = (s->static_len + 3 + 7) >> 3;

    Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
            opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
@@ -963,7 +963,7 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
 #ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
 #else
-   if (stored_len+4 <= opt_lenb && buf != (char*)0) {
+   if (stored_len + 4 <= opt_lenb && buf != (char*)0) {
                       /* 4: two words for the lengths */
 #endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
@@ -975,16 +975,16 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
        _tr_stored_block(s, buf, stored_len, last);

    } else if (static_lenb == opt_lenb) {
-       send_bits(s, (STATIC_TREES<<1)+last, 3);
+       send_bits(s, (STATIC_TREES<<1) + last, 3);
        compress_block(s, (const ct_data *)static_ltree,
                       (const ct_data *)static_dtree);
 #ifdef ZLIB_DEBUG
        s->compressed_len += 3 + s->static_len;
 #endif
    } else {
-       send_bits(s, (DYN_TREES<<1)+last, 3);
-       send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
-                      max_blindex+1);
+       send_bits(s, (DYN_TREES<<1) + last, 3);
+       send_all_trees(s, s->l_desc.max_code + 1, s->d_desc.max_code + 1,
+                      max_blindex + 1);
        compress_block(s, (const ct_data *)s->dyn_ltree,
                       (const ct_data *)s->dyn_dtree);
 #ifdef ZLIB_DEBUG
@@ -1003,18 +1003,18 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
        s->compressed_len += 7; /* align on byte boundary */
 #endif
    }
-   Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
-          s->compressed_len-7*last));
+   Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len >> 3,
+          s->compressed_len - 7*last));
 }

 /* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
-int ZLIB_INTERNAL _tr_tally (s, dist, lc)
+int ZLIB_INTERNAL _tr_tally(s, dist, lc)
     deflate_state *s;
     unsigned dist; /* distance of matched string */
-    unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */
+    unsigned lc; /* match length - MIN_MATCH or unmatched char (dist==0) */
 {
    s->sym_buf[s->sym_next++] = (uch)dist;
    s->sym_buf[s->sym_next++] = (uch)(dist >> 8);
@@ -1030,7 +1030,7 @@ int ZLIB_INTERNAL _tr_tally (s, dist, lc)
           (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
           (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match");

-       s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
+       s->dyn_ltree[_length_code[lc] + LITERALS + 1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }
    return (s->sym_next == s->sym_end);
@@ -1060,7 +1060,7 @@ local void compress_block(s, ltree, dtree)
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = _length_code[lc];
-           send_code(s, code+LITERALS+1, ltree); /* send the length code */
+           send_code(s, code + LITERALS + 1, ltree); /* send length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
@@ -1176,6 +1176,6 @@ local void bi_windup(s)
    s->bi_buf = 0;
    s->bi_valid = 0;
 #ifdef ZLIB_DEBUG
-   s->bits_sent = (s->bits_sent+7) & ~7;
+   s->bits_sent = (s->bits_sent + 7) & ~7;
 #endif
 }

uncompr.c

@@ -24,7 +24,7 @@
   Z_DATA_ERROR if the input data was corrupted, including if the input data is
   an incomplete zlib stream.
 */
-int ZEXPORT uncompress2 (dest, destLen, source, sourceLen)
+int ZEXPORT uncompress2(dest, destLen, source, sourceLen)
     Bytef *dest;
     uLongf *destLen;
     const Bytef *source;
@@ -83,7 +83,7 @@ int ZEXPORT uncompress2 (dest, destLen, source, sourceLen)
           err;
 }

-int ZEXPORT uncompress (dest, destLen, source, sourceLen)
+int ZEXPORT uncompress(dest, destLen, source, sourceLen)
     Bytef *dest;
     uLongf *destLen;
     const Bytef *source;
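
A hedged round-trip sketch for the uncompress2() signature touched above; unlike uncompress(), uncompress2() takes the compressed length by address and reports back how many source bytes it consumed. Buffer sizes and the sample text are illustrative.

#include <stdio.h>
#include <string.h>
#include <zlib.h>

int main(void) {
    const Bytef text[] = "the same bytes out as went in";
    Bytef packed[256], unpacked[256];
    uLongf packedLen = sizeof(packed), unpackedLen = sizeof(unpacked);
    uLong consumed;

    if (compress(packed, &packedLen, text, sizeof(text)) != Z_OK)
        return 1;

    consumed = packedLen;              /* uncompress2 updates this in place */
    if (uncompress2(unpacked, &unpackedLen, packed, &consumed) != Z_OK)
        return 1;

    printf("consumed %lu of %lu bytes, recovered %lu bytes\n",
           (unsigned long)consumed, (unsigned long)packedLen,
           (unsigned long)unpackedLen);
    return memcmp(text, unpacked, sizeof(text)) != 0;
}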

zlib.h

@@ -1913,7 +1913,7 @@ ZEXTERN int ZEXPORT inflateSyncPoint OF((z_streamp));
 ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table OF((void));
 ZEXTERN int ZEXPORT inflateUndermine OF((z_streamp, int));
 ZEXTERN int ZEXPORT inflateValidate OF((z_streamp, int));
-ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF ((z_streamp));
+ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF((z_streamp));
 ZEXTERN int ZEXPORT inflateResetKeep OF((z_streamp));
 ZEXTERN int ZEXPORT deflateResetKeep OF((z_streamp));
 #if defined(_WIN32) && !defined(Z_SOLO)

zutil.c

@@ -119,7 +119,7 @@ uLong ZEXPORT zlibCompileFlags()
 # endif
 int ZLIB_INTERNAL z_verbose = verbose;

-void ZLIB_INTERNAL z_error (m)
+void ZLIB_INTERNAL z_error(m)
     char *m;
 {
     fprintf(stderr, "%s\n", m);
@@ -214,7 +214,7 @@ local ptr_table table[MAX_PTR];
 * a protected system like OS/2. Use Microsoft C instead.
 */

-voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, unsigned items, unsigned size)
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size)
 {
     voidpf buf;
     ulg bsize = (ulg)items*size;
@@ -240,7 +240,7 @@ voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, unsigned items, unsigned size)
     return buf;
 }

-void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
 {
     int n;

@@ -277,13 +277,13 @@ void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
 # define _hfree hfree
 #endif

-voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, uInt items, uInt size)
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, uInt items, uInt size)
 {
     (void)opaque;
     return _halloc((long)items, size);
 }

-void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
 {
     (void)opaque;
     _hfree(ptr);
@@ -302,7 +302,7 @@ extern voidp calloc OF((uInt items, uInt size));
 extern void free OF((voidpf ptr));
 #endif

-voidpf ZLIB_INTERNAL zcalloc (opaque, items, size)
+voidpf ZLIB_INTERNAL zcalloc(opaque, items, size)
     voidpf opaque;
     unsigned items;
     unsigned size;
@@ -312,7 +312,7 @@ voidpf ZLIB_INTERNAL zcalloc (opaque, items, size)
     (voidpf)calloc(items, size);
 }

-void ZLIB_INTERNAL zcfree (opaque, ptr)
+void ZLIB_INTERNAL zcfree(opaque, ptr)
     voidpf opaque;
     voidpf ptr;
 {
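
The zcalloc()/zcfree() hunks above are the fallback allocators used when a z_stream leaves zalloc and zfree as Z_NULL. A minimal sketch of supplying custom allocators instead; the function names here are illustrative, only the alloc_func/free_func shapes come from zlib.h.

#include <stdlib.h>
#include <zlib.h>

/* Matches zlib's alloc_func: voidpf (*)(voidpf opaque, uInt items, uInt size). */
static voidpf my_alloc(voidpf opaque, uInt items, uInt size) {
    (void)opaque;
    return calloc(items, size);
}

/* Matches zlib's free_func: void (*)(voidpf opaque, voidpf ptr). */
static void my_free(voidpf opaque, voidpf ptr) {
    (void)opaque;
    free(ptr);
}

int main(void) {
    z_stream strm;
    strm.zalloc = my_alloc;   /* or Z_NULL to fall back to zcalloc() */
    strm.zfree = my_free;     /* or Z_NULL to fall back to zcfree() */
    strm.opaque = Z_NULL;
    if (deflateInit(&strm, Z_DEFAULT_COMPRESSION) != Z_OK)
        return 1;
    return deflateEnd(&strm) == Z_OK ? 0 : 1;
}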