Annotation of /trunk/mkinitrd-magellan/busybox/archival/gzip.c
Parent Directory | Revision Log
Revision 816 -
(hide annotations)
(download)
Fri Apr 24 18:33:46 2009 UTC (15 years, 1 month ago) by niro
File MIME type: text/plain
File size: 65265 byte(s)
-updated to busybox-1.13.4
1 | niro | 532 | /* vi: set sw=4 ts=4: */ |
2 | /* | ||
3 | * Gzip implementation for busybox | ||
4 | * | ||
5 | * Based on GNU gzip Copyright (C) 1992-1993 Jean-loup Gailly. | ||
6 | * | ||
7 | * Originally adjusted for busybox by Charles P. Wright <cpw@unix.asb.com> | ||
8 | * "this is a stripped down version of gzip I put into busybox, it does | ||
9 | * only standard in to standard out with -9 compression. It also requires | ||
10 | * the zcat module for some important functions." | ||
11 | * | ||
12 | * Adjusted further by Erik Andersen <andersen@codepoet.org> to support | ||
13 | * files as well as stdin/stdout, and to generally behave itself wrt | ||
14 | * command line handling. | ||
15 | * | ||
16 | * Licensed under GPLv2 or later, see file LICENSE in this tarball for details. | ||
17 | */ | ||
18 | |||
19 | /* big objects in bss: | ||
20 | * 00000020 b bl_count | ||
21 | * 00000074 b base_length | ||
22 | * 00000078 b base_dist | ||
23 | * 00000078 b static_dtree | ||
24 | * 0000009c b bl_tree | ||
25 | * 000000f4 b dyn_dtree | ||
26 | * 00000100 b length_code | ||
27 | * 00000200 b dist_code | ||
28 | * 0000023d b depth | ||
29 | * 00000400 b flag_buf | ||
30 | * 0000047a b heap | ||
31 | * 00000480 b static_ltree | ||
32 | * 000008f4 b dyn_ltree | ||
33 | */ | ||
34 | |||
35 | /* TODO: full support for -v for DESKTOP | ||
36 | * "/usr/bin/gzip -v a bogus aa" should say: | ||
37 | a: 85.1% -- replaced with a.gz | ||
38 | gzip: bogus: No such file or directory | ||
39 | aa: 85.1% -- replaced with aa.gz | ||
40 | */ | ||
41 | |||
42 | niro | 816 | #include "libbb.h" |
43 | #include "unarchive.h" | ||
44 | niro | 532 | |
45 | |||
46 | /* =========================================================================== | ||
47 | */ | ||
48 | //#define DEBUG 1 | ||
49 | /* Diagnostic functions */ | ||
50 | #ifdef DEBUG | ||
51 | niro | 816 | # define Assert(cond,msg) { if (!(cond)) bb_error_msg(msg); } |
52 | niro | 532 | # define Trace(x) fprintf x |
53 | niro | 816 | # define Tracev(x) {if (verbose) fprintf x; } |
54 | # define Tracevv(x) {if (verbose > 1) fprintf x; } | ||
55 | # define Tracec(c,x) {if (verbose && (c)) fprintf x; } | ||
56 | # define Tracecv(c,x) {if (verbose > 1 && (c)) fprintf x; } | ||
57 | niro | 532 | #else |
58 | # define Assert(cond,msg) | ||
59 | # define Trace(x) | ||
60 | # define Tracev(x) | ||
61 | # define Tracevv(x) | ||
62 | # define Tracec(c,x) | ||
63 | # define Tracecv(c,x) | ||
64 | #endif | ||
65 | |||
66 | |||
67 | /* =========================================================================== | ||
68 | */ | ||
69 | #define SMALL_MEM | ||
70 | |||
71 | #ifndef INBUFSIZ | ||
72 | # ifdef SMALL_MEM | ||
73 | # define INBUFSIZ 0x2000 /* input buffer size */ | ||
74 | # else | ||
75 | # define INBUFSIZ 0x8000 /* input buffer size */ | ||
76 | # endif | ||
77 | #endif | ||
78 | |||
79 | #ifndef OUTBUFSIZ | ||
80 | # ifdef SMALL_MEM | ||
81 | # define OUTBUFSIZ 8192 /* output buffer size */ | ||
82 | # else | ||
83 | # define OUTBUFSIZ 16384 /* output buffer size */ | ||
84 | # endif | ||
85 | #endif | ||
86 | |||
87 | #ifndef DIST_BUFSIZE | ||
88 | # ifdef SMALL_MEM | ||
89 | # define DIST_BUFSIZE 0x2000 /* buffer for distances, see trees.c */ | ||
90 | # else | ||
91 | # define DIST_BUFSIZE 0x8000 /* buffer for distances, see trees.c */ | ||
92 | # endif | ||
93 | #endif | ||
94 | |||
95 | /* gzip flag byte */ | ||
96 | #define ASCII_FLAG 0x01 /* bit 0 set: file probably ascii text */ | ||
97 | #define CONTINUATION 0x02 /* bit 1 set: continuation of multi-part gzip file */ | ||
98 | #define EXTRA_FIELD 0x04 /* bit 2 set: extra field present */ | ||
99 | #define ORIG_NAME 0x08 /* bit 3 set: original file name present */ | ||
100 | #define COMMENT 0x10 /* bit 4 set: file comment present */ | ||
101 | #define RESERVED 0xC0 /* bit 6,7: reserved */ | ||
102 | |||
103 | /* internal file attribute */ | ||
104 | #define UNKNOWN 0xffff | ||
105 | #define BINARY 0 | ||
106 | #define ASCII 1 | ||
107 | |||
108 | #ifndef WSIZE | ||
109 | # define WSIZE 0x8000 /* window size--must be a power of two, and */ | ||
110 | #endif /* at least 32K for zip's deflate method */ | ||
111 | |||
112 | #define MIN_MATCH 3 | ||
113 | #define MAX_MATCH 258 | ||
114 | /* The minimum and maximum match lengths */ | ||
115 | |||
116 | #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1) | ||
117 | /* Minimum amount of lookahead, except at the end of the input file. | ||
118 | * See deflate.c for comments about the MIN_MATCH+1. | ||
119 | */ | ||
120 | |||
121 | #define MAX_DIST (WSIZE-MIN_LOOKAHEAD) | ||
122 | /* In order to simplify the code, particularly on 16 bit machines, match | ||
123 | * distances are limited to MAX_DIST instead of WSIZE. | ||
124 | */ | ||
125 | |||
126 | #ifndef MAX_PATH_LEN | ||
127 | # define MAX_PATH_LEN 1024 /* max pathname length */ | ||
128 | #endif | ||
129 | |||
130 | #define seekable() 0 /* force sequential output */ | ||
131 | #define translate_eol 0 /* no option -a yet */ | ||
132 | |||
133 | #ifndef BITS | ||
134 | # define BITS 16 | ||
135 | #endif | ||
136 | #define INIT_BITS 9 /* Initial number of bits per code */ | ||
137 | |||
138 | #define BIT_MASK 0x1f /* Mask for 'number of compression bits' */ | ||
139 | /* Mask 0x20 is reserved to mean a fourth header byte, and 0x40 is free. | ||
140 | * It's a pity that old uncompress does not check bit 0x20. That makes | ||
141 | * extension of the format actually undesirable because old compress | ||
142 | * would just crash on the new format instead of giving a meaningful | ||
143 | * error message. It does check the number of bits, but it's more | ||
144 | * helpful to say "unsupported format, get a new version" than | ||
145 | * "can only handle 16 bits". | ||
146 | */ | ||
147 | |||
148 | #ifdef MAX_EXT_CHARS | ||
149 | # define MAX_SUFFIX MAX_EXT_CHARS | ||
150 | #else | ||
151 | # define MAX_SUFFIX 30 | ||
152 | #endif | ||
153 | |||
154 | |||
155 | /* =========================================================================== | ||
156 | * Compile with MEDIUM_MEM to reduce the memory requirements or | ||
157 | * with SMALL_MEM to use as little memory as possible. Use BIG_MEM if the | ||
158 | * entire input file can be held in memory (not possible on 16 bit systems). | ||
159 | * Warning: defining these symbols affects HASH_BITS (see below) and thus | ||
160 | * affects the compression ratio. The compressed output | ||
161 | * is still correct, and might even be smaller in some cases. | ||
162 | */ | ||
163 | |||
164 | #ifdef SMALL_MEM | ||
165 | # define HASH_BITS 13 /* Number of bits used to hash strings */ | ||
166 | #endif | ||
167 | #ifdef MEDIUM_MEM | ||
168 | # define HASH_BITS 14 | ||
169 | #endif | ||
170 | #ifndef HASH_BITS | ||
171 | # define HASH_BITS 15 | ||
172 | /* For portability to 16 bit machines, do not use values above 15. */ | ||
173 | #endif | ||
174 | |||
175 | #define HASH_SIZE (unsigned)(1<<HASH_BITS) | ||
176 | #define HASH_MASK (HASH_SIZE-1) | ||
177 | #define WMASK (WSIZE-1) | ||
178 | /* HASH_SIZE and WSIZE must be powers of two */ | ||
179 | #ifndef TOO_FAR | ||
180 | # define TOO_FAR 4096 | ||
181 | #endif | ||
182 | /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */ | ||
183 | |||
184 | |||
185 | /* =========================================================================== | ||
186 | * These types are not really 'char', 'short' and 'long' | ||
187 | */ | ||
188 | typedef uint8_t uch; | ||
189 | typedef uint16_t ush; | ||
190 | typedef uint32_t ulg; | ||
191 | typedef int32_t lng; | ||
192 | |||
193 | typedef ush Pos; | ||
194 | typedef unsigned IPos; | ||
195 | /* A Pos is an index in the character window. We use short instead of int to | ||
196 | * save space in the various tables. IPos is used only for parameter passing. | ||
197 | */ | ||
198 | |||
199 | enum { | ||
200 | WINDOW_SIZE = 2 * WSIZE, | ||
201 | /* window size, 2*WSIZE except for MMAP or BIG_MEM, where it is the | ||
202 | * input file length plus MIN_LOOKAHEAD. | ||
203 | */ | ||
204 | |||
205 | max_chain_length = 4096, | ||
206 | /* To speed up deflation, hash chains are never searched beyond this length. | ||
207 | * A higher limit improves compression ratio but degrades the speed. | ||
208 | */ | ||
209 | |||
210 | max_lazy_match = 258, | ||
211 | /* Attempt to find a better match only when the current match is strictly | ||
212 | * smaller than this value. This mechanism is used only for compression | ||
213 | * levels >= 4. | ||
214 | */ | ||
215 | |||
216 | max_insert_length = max_lazy_match, | ||
217 | /* Insert new strings in the hash table only if the match length | ||
218 | * is not greater than this length. This saves time but degrades compression. | ||
219 | * max_insert_length is used only for compression levels <= 3. | ||
220 | */ | ||
221 | |||
222 | good_match = 32, | ||
223 | /* Use a faster search when the previous match is longer than this */ | ||
224 | |||
225 | /* Values for max_lazy_match, good_match and max_chain_length, depending on | ||
226 | * the desired pack level (0..9). The values given below have been tuned to | ||
227 | * exclude worst case performance for pathological files. Better values may be | ||
228 | * found for specific files. | ||
229 | */ | ||
230 | |||
231 | nice_match = 258, /* Stop searching when current match exceeds this */ | ||
232 | /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4 | ||
233 | * For deflate_fast() (levels <= 3) good is ignored and lazy has a different | ||
234 | * meaning. | ||
235 | */ | ||
236 | }; | ||
237 | |||
238 | |||
/* All mutable deflate state lives in one struct so busybox can place it
 * behind ptr_to_globals (see the G1 macro below) instead of in bss.
 * Several #defines are interleaved here on purpose: they alias members
 * of this struct and must track its layout.
 */
struct globals {

	lng block_start;

	/* window position at the beginning of the current output block. Gets
	 * negative when the window is moved backwards.
	 */
	unsigned ins_h;	/* hash index of string to be inserted */

#define H_SHIFT  ((HASH_BITS+MIN_MATCH-1) / MIN_MATCH)
	/* Number of bits by which ins_h and del_h must be shifted at each
	 * input step. It must be such that after MIN_MATCH steps, the oldest
	 * byte no longer takes part in the hash key, that is:
	 * H_SHIFT * MIN_MATCH >= HASH_BITS
	 */

	unsigned prev_length;

	/* Length of the best match at previous step. Matches not greater than this
	 * are discarded. This is used in the lazy match evaluation.
	 */

	unsigned strstart;	/* start of string to insert */
	unsigned match_start;	/* start of matching string */
	unsigned lookahead;	/* number of valid bytes ahead in window */

/* ===========================================================================
 * DECLARE emits a pointer member; ALLOC/FREE manage the heap storage
 * (rounded up to an even number of elements).
 */
#define DECLARE(type, array, size) \
	type * array
#define ALLOC(type, array, size) \
	array = xzalloc((size_t)(((size)+1L)/2) * 2*sizeof(type));
#define FREE(array) \
	do { free(array); array = NULL; } while (0)

	/* global buffers */

	/* buffer for literals or lengths */
	/* DECLARE(uch, l_buf, LIT_BUFSIZE); */
	DECLARE(uch, l_buf, INBUFSIZ);

	DECLARE(ush, d_buf, DIST_BUFSIZE);
	DECLARE(uch, outbuf, OUTBUFSIZ);

	/* Sliding window. Input bytes are read into the second half of the window,
	 * and move to the first half later to keep a dictionary of at least WSIZE
	 * bytes. With this organization, matches are limited to a distance of
	 * WSIZE-MAX_MATCH bytes, but this ensures that IO is always
	 * performed with a length multiple of the block size. Also, it limits
	 * the window size to 64K, which is quite useful on MSDOS.
	 * To do: limit the window size to WSIZE+BSZ if SMALL_MEM (the code would
	 * be less efficient).
	 */
	DECLARE(uch, window, 2L * WSIZE);

	/* Link to older string with same hash index. To limit the size of this
	 * array to 64K, this link is maintained only for the last 32K strings.
	 * An index in this array is thus a window index modulo 32K.
	 */
	/* DECLARE(Pos, prev, WSIZE); */
	DECLARE(ush, prev, 1L << BITS);

	/* Heads of the hash chains or 0. */
	/* DECLARE(Pos, head, 1<<HASH_BITS); */
#define head (G1.prev + WSIZE) /* hash head (see deflate.c) */

	/* number of input bytes */
	ulg isize;		/* only 32 bits stored in .gz file */

	/* bbox always use stdin/stdout */
#define ifd STDIN_FILENO	/* input file descriptor */
#define ofd STDOUT_FILENO	/* output file descriptor */

#ifdef DEBUG
	unsigned insize;	/* valid bytes in l_buf */
#endif
	unsigned outcnt;	/* bytes in output buffer */

	smallint eofile;	/* flag set at end of input file */

	/* ===========================================================================
	 * Local data used by the "bit string" routines.
	 */

	unsigned short bi_buf;

	/* Output buffer. bits are inserted starting at the bottom (least significant
	 * bits).
	 */

#undef BUF_SIZE
#define BUF_SIZE (8 * sizeof(G1.bi_buf))
	/* Number of bits used within bi_buf. (bi_buf might be implemented on
	 * more than 16 bits on some systems.)
	 */

	int bi_valid;

	/* Current input function. Set to mem_read for in-memory compression */

#ifdef DEBUG
	ulg bits_sent;	/* bit length of the compressed data */
#endif

	uint32_t *crc_32_tab;	/* CRC-32 lookup table, set up at runtime */
	uint32_t crc;	/* shift register contents */
};
346 | niro | 532 | |
347 | niro | 816 | #define G1 (*(ptr_to_globals - 1)) |
348 | |||
349 | |||
350 | niro | 532 | /* =========================================================================== |
351 | * Write the output buffer outbuf[0..outcnt-1] and update bytes_out. | ||
352 | * (used for the compressed data only) | ||
353 | */ | ||
354 | static void flush_outbuf(void) | ||
355 | { | ||
356 | niro | 816 | if (G1.outcnt == 0) |
357 | niro | 532 | return; |
358 | |||
359 | niro | 816 | xwrite(ofd, (char *) G1.outbuf, G1.outcnt); |
360 | G1.outcnt = 0; | ||
361 | niro | 532 | } |
362 | |||
363 | |||
364 | /* =========================================================================== | ||
365 | */ | ||
366 | /* put_8bit is used for the compressed output */ | ||
367 | #define put_8bit(c) \ | ||
368 | niro | 816 | do { \ |
369 | G1.outbuf[G1.outcnt++] = (c); \ | ||
370 | if (G1.outcnt == OUTBUFSIZ) flush_outbuf(); \ | ||
371 | } while (0) | ||
372 | niro | 532 | |
373 | /* Output a 16 bit value, lsb first */ | ||
374 | static void put_16bit(ush w) | ||
375 | { | ||
376 | niro | 816 | if (G1.outcnt < OUTBUFSIZ - 2) { |
377 | G1.outbuf[G1.outcnt++] = w; | ||
378 | G1.outbuf[G1.outcnt++] = w >> 8; | ||
379 | niro | 532 | } else { |
380 | put_8bit(w); | ||
381 | put_8bit(w >> 8); | ||
382 | } | ||
383 | } | ||
384 | |||
385 | static void put_32bit(ulg n) | ||
386 | { | ||
387 | put_16bit(n); | ||
388 | put_16bit(n >> 16); | ||
389 | } | ||
390 | |||
/* ===========================================================================
 * Clear input and output buffers
 */
static void clear_bufs(void)
{
	G1.outcnt = 0;	/* output buffer holds no pending bytes */
#ifdef DEBUG
	G1.insize = 0;	/* no valid bytes in l_buf */
#endif
	G1.isize = 0;	/* running count of input bytes starts at zero */
}
402 | |||
403 | |||
404 | /* =========================================================================== | ||
405 | * Run a set of bytes through the crc shift register. If s is a NULL | ||
406 | * pointer, then initialize the crc shift register contents instead. | ||
407 | * Return the current crc in either case. | ||
408 | */ | ||
409 | static uint32_t updcrc(uch * s, unsigned n) | ||
410 | { | ||
411 | niro | 816 | uint32_t c = G1.crc; |
412 | niro | 532 | while (n) { |
413 | niro | 816 | c = G1.crc_32_tab[(uch)(c ^ *s++)] ^ (c >> 8); |
414 | niro | 532 | n--; |
415 | } | ||
416 | niro | 816 | G1.crc = c; |
417 | niro | 532 | return c; |
418 | } | ||
419 | |||
420 | |||
421 | /* =========================================================================== | ||
422 | * Read a new buffer from the current input file, perform end-of-line | ||
423 | * translation, and update the crc and input file size. | ||
424 | * IN assertion: size >= 2 (for end-of-line translation) | ||
425 | */ | ||
426 | static unsigned file_read(void *buf, unsigned size) | ||
427 | { | ||
428 | unsigned len; | ||
429 | |||
430 | niro | 816 | Assert(G1.insize == 0, "l_buf not empty"); |
431 | niro | 532 | |
432 | len = safe_read(ifd, buf, size); | ||
433 | if (len == (unsigned)(-1) || len == 0) | ||
434 | return len; | ||
435 | |||
436 | updcrc(buf, len); | ||
437 | niro | 816 | G1.isize += len; |
438 | niro | 532 | return len; |
439 | } | ||
440 | |||
441 | |||
/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 */
static void send_bits(int value, int length)
{
#ifdef DEBUG
	Tracev((stderr, " l %2d v %4x ", length, value));
	Assert(length > 0 && length <= 15, "invalid length");
	G1.bits_sent += length;
#endif
	/* If not enough room in bi_buf, use (valid) bits from bi_buf and
	 * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
	 * unused bits in value.
	 */
	if (G1.bi_valid > (int) BUF_SIZE - length) {
		/* buffer would overflow: top it up with the low bits of value,
		 * flush it, then keep value's remaining high bits for next time */
		G1.bi_buf |= (value << G1.bi_valid);
		put_16bit(G1.bi_buf);
		G1.bi_buf = (ush) value >> (BUF_SIZE - G1.bi_valid);
		G1.bi_valid += length - BUF_SIZE;
	} else {
		/* value fits: pack it above the bits already accumulated */
		G1.bi_buf |= value << G1.bi_valid;
		G1.bi_valid += length;
	}
}
467 | |||
468 | |||
/* ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
static unsigned bi_reverse(unsigned code, int len)
{
	unsigned reversed = code & 1;	/* lowest input bit ends up highest */

	while (--len > 0) {
		code >>= 1;
		reversed = (reversed << 1) | (code & 1);
	}
	return reversed;
}
485 | |||
486 | |||
487 | /* =========================================================================== | ||
488 | * Write out any remaining bits in an incomplete byte. | ||
489 | */ | ||
490 | static void bi_windup(void) | ||
491 | { | ||
492 | niro | 816 | if (G1.bi_valid > 8) { |
493 | put_16bit(G1.bi_buf); | ||
494 | } else if (G1.bi_valid > 0) { | ||
495 | put_8bit(G1.bi_buf); | ||
496 | niro | 532 | } |
497 | niro | 816 | G1.bi_buf = 0; |
498 | G1.bi_valid = 0; | ||
499 | niro | 532 | #ifdef DEBUG |
500 | niro | 816 | G1.bits_sent = (G1.bits_sent + 7) & ~7; |
501 | niro | 532 | #endif |
502 | } | ||
503 | |||
504 | |||
/* ===========================================================================
 * Emit a stored block to the zip file: optionally the length and its
 * one's complement first, then the raw bytes, starting on a byte boundary.
 */
static void copy_block(char *buf, unsigned len, int header)
{
	unsigned i;

	bi_windup();	/* align on byte boundary */

	if (header) {
		/* stored-block header: LEN then ~LEN, as the format requires */
		put_16bit(len);
		put_16bit(~len);
#ifdef DEBUG
		G1.bits_sent += 2 * 16;
#endif
	}
#ifdef DEBUG
	G1.bits_sent += (ulg) len << 3;
#endif
	for (i = 0; i < len; i++) {
		put_8bit(buf[i]);
	}
}
527 | |||
528 | |||
/* ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead, and sets eofile if end of input file.
 * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
 * OUT assertions: at least one byte has been read, or eofile is set;
 * file reads are performed for at least two bytes (required for the
 * translate_eol option).
 */
static void fill_window(void)
{
	unsigned n, m;
	unsigned more = WINDOW_SIZE - G1.lookahead - G1.strstart;
	/* Amount of free space at the end of the window. */

	/* If the window is almost full and there is insufficient lookahead,
	 * move the upper half to the lower one to make room in the upper half.
	 */
	if (more == (unsigned) -1) {
		/* Very unlikely, but possible on 16 bit machine if strstart == 0
		 * and lookahead == 1 (input done one byte at time)
		 */
		more--;
	} else if (G1.strstart >= WSIZE + MAX_DIST) {
		/* By the IN assertion, the window is not empty so we can't confuse
		 * more == 0 with more == 64K on a 16 bit machine.
		 */
		Assert(WINDOW_SIZE == 2 * WSIZE, "no sliding with BIG_MEM");

		/* slide the dictionary down by WSIZE ... */
		memcpy(G1.window, G1.window + WSIZE, WSIZE);
		G1.match_start -= WSIZE;
		G1.strstart -= WSIZE;	/* we now have strstart >= MAX_DIST: */

		G1.block_start -= WSIZE;

		/* ... and rebase every hash-chain index by the same amount;
		 * entries that would go negative point nowhere and become 0 */
		for (n = 0; n < HASH_SIZE; n++) {
			m = head[n];
			head[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
		}
		for (n = 0; n < WSIZE; n++) {
			m = G1.prev[n];
			G1.prev[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
			/* If n is not on any hash chain, prev[n] is garbage but
			 * its value will never be used.
			 */
		}
		more += WSIZE;
	}
	/* At this point, more >= 2 */
	if (!G1.eofile) {
		n = file_read(G1.window + G1.strstart + G1.lookahead, more);
		if (n == 0 || n == (unsigned) -1) {
			G1.eofile = 1;
		} else {
			G1.lookahead += n;
		}
	}
}
586 | |||
587 | |||
/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 */

/* For MSDOS, OS/2 and 386 Unix, an optimized version is in match.asm or
 * match.s. The code is functionally equivalent, so you can use the C version
 * if desired.
 */
static int longest_match(IPos cur_match)
{
	unsigned chain_length = max_chain_length;	/* max hash chain length */
	uch *scan = G1.window + G1.strstart;	/* current string */
	uch *match;	/* matched string */
	int len;	/* length of current match */
	int best_len = G1.prev_length;	/* best match length so far */
	IPos limit = G1.strstart > (IPos) MAX_DIST ? G1.strstart - (IPos) MAX_DIST : 0;
	/* Stop when cur_match becomes <= limit. To simplify the code,
	 * we prevent matches with the string of window index 0.
	 */

	/* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
	 * It is easy to get rid of this optimization if necessary.
	 */
#if HASH_BITS < 8 || MAX_MATCH != 258
#  error Code too clever
#endif
	uch *strend = G1.window + G1.strstart + MAX_MATCH;
	uch scan_end1 = scan[best_len - 1];	/* byte just before the match end */
	uch scan_end = scan[best_len];	/* byte at the current match end */

	/* Do not waste too much time if we already have a good match: */
	if (G1.prev_length >= good_match) {
		chain_length >>= 2;
	}
	Assert(G1.strstart <= WINDOW_SIZE - MIN_LOOKAHEAD, "insufficient lookahead");

	do {
		Assert(cur_match < G1.strstart, "no future");
		match = G1.window + cur_match;

		/* Skip to next match if the match length cannot increase
		 * or if the match length is less than 2:
		 */
		if (match[best_len] != scan_end ||
			match[best_len - 1] != scan_end1 ||
			*match != *scan || *++match != scan[1])
			continue;

		/* The check at best_len-1 can be removed because it will be made
		 * again later. (This heuristic is not always a win.)
		 * It is not necessary to compare scan[2] and match[2] since they
		 * are always equal when the other bytes match, given that
		 * the hash keys are equal and that HASH_BITS >= 8.
		 */
		scan += 2, match++;

		/* We check for insufficient lookahead only every 8th comparison;
		 * the 256th check will be made at strstart+258.
		 */
		do {
		} while (*++scan == *++match && *++scan == *++match &&
				 *++scan == *++match && *++scan == *++match &&
				 *++scan == *++match && *++scan == *++match &&
				 *++scan == *++match && *++scan == *++match && scan < strend);

		len = MAX_MATCH - (int) (strend - scan);
		scan = strend - MAX_MATCH;	/* rewind scan to strstart */

		if (len > best_len) {
			G1.match_start = cur_match;
			best_len = len;
			if (len >= nice_match)
				break;	/* good enough: stop searching the chain */
			scan_end1 = scan[best_len - 1];
			scan_end = scan[best_len];
		}
	} while ((cur_match = G1.prev[cur_match & WMASK]) > limit
			 && --chain_length != 0);

	return best_len;
}
674 | |||
675 | |||
#ifdef DEBUG
/* ===========================================================================
 * Check that the match at match_start is indeed a match.
 * Debug-only sanity check; compiles to ((void)0) in normal builds.
 * NOTE(review): 'verbose' is not defined anywhere in this file — the DEBUG
 * build presumably relies on it coming from elsewhere; confirm before
 * enabling DEBUG.
 */
static void check_match(IPos start, IPos match, int length)
{
	/* check that the match is indeed a match */
	if (memcmp(G1.window + match, G1.window + start, length) != 0) {
		bb_error_msg(" start %d, match %d, length %d", start, match, length);
		bb_error_msg("invalid match");
	}
	if (verbose > 1) {
		bb_error_msg("\\[%d,%d]", start - match, length);
		do {
			fputc(G1.window[start++], stderr);
		} while (--length != 0);
	}
}
#else
#  define check_match(start, match, length) ((void)0)
#endif
697 | |||
698 | |||
699 | /* trees.c -- output deflated data using Huffman coding | ||
700 | * Copyright (C) 1992-1993 Jean-loup Gailly | ||
701 | * This is free software; you can redistribute it and/or modify it under the | ||
702 | * terms of the GNU General Public License, see the file COPYING. | ||
703 | */ | ||
704 | |||
705 | /* PURPOSE | ||
706 | * Encode various sets of source values using variable-length | ||
707 | * binary code trees. | ||
708 | * | ||
709 | * DISCUSSION | ||
710 | * The PKZIP "deflation" process uses several Huffman trees. The more | ||
711 | * common source values are represented by shorter bit sequences. | ||
712 | * | ||
713 | * Each code tree is stored in the ZIP file in a compressed form | ||
714 | * which is itself a Huffman encoding of the lengths of | ||
715 | * all the code strings (in ascending order by source values). | ||
716 | * The actual code strings are reconstructed from the lengths in | ||
717 | * the UNZIP process, as described in the "application note" | ||
718 | * (APPNOTE.TXT) distributed as part of PKWARE's PKZIP program. | ||
719 | * | ||
720 | * REFERENCES | ||
721 | * Lynch, Thomas J. | ||
722 | * Data Compression: Techniques and Applications, pp. 53-55. | ||
723 | * Lifetime Learning Publications, 1985. ISBN 0-534-03418-7. | ||
724 | * | ||
725 | * Storer, James A. | ||
726 | * Data Compression: Methods and Theory, pp. 49-50. | ||
727 | * Computer Science Press, 1988. ISBN 0-7167-8156-5. | ||
728 | * | ||
729 | * Sedgewick, R. | ||
730 | * Algorithms, p290. | ||
731 | * Addison-Wesley, 1983. ISBN 0-201-06672-6. | ||
732 | * | ||
733 | * INTERFACE | ||
734 | niro | 816 | * void ct_init() |
735 | * Allocate the match buffer, initialize the various tables [and save | ||
736 | niro | 532 | * the location of the internal file attribute (ascii/binary) and |
737 | niro | 816 | * method (DEFLATE/STORE) -- deleted in bbox] |
738 | niro | 532 | * |
739 | * void ct_tally(int dist, int lc); | ||
740 | * Save the match info and tally the frequency counts. | ||
741 | * | ||
742 | * ulg flush_block(char *buf, ulg stored_len, int eof) | ||
743 | * Determine the best encoding for the current block: dynamic trees, | ||
744 | * static trees or store, and output the encoded block to the zip | ||
745 | * file. Returns the total compressed length for the file so far. | ||
746 | */ | ||
747 | |||
/* Limits and alphabet sizes fixed by the DEFLATE format */

#define MAX_BITS 15
/* All codes must not exceed MAX_BITS bits */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define LENGTH_CODES 29
/* number of length codes, not counting the special END_BLOCK code */

#define LITERALS 256
/* number of literal bytes 0..255 */

#define END_BLOCK 256
/* end of block literal code */

#define L_CODES (LITERALS+1+LENGTH_CODES)
/* number of Literal or Length codes, including the END_BLOCK code */

#define D_CODES 30
/* number of distance codes */

#define BL_CODES 19
/* number of codes used to transfer the bit lengths */
/* extra bits for each length code */
static const uint8_t extra_lbits[LENGTH_CODES] ALIGN1 = {
	0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4,
	4, 4, 5, 5, 5, 5, 0
};

/* extra bits for each distance code */
static const uint8_t extra_dbits[D_CODES] ALIGN1 = {
	0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9,
	10, 10, 11, 11, 12, 12, 13, 13
};

/* extra bits for each bit length code */
static const uint8_t extra_blbits[BL_CODES] ALIGN1 = {
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 };

/* Order in which the lengths of the bit length codes are transmitted:
 * roughly decreasing probability, so the lengths of trailing unused
 * codes need not be sent at all (see send_all_trees/build_bl_tree).
 */
static const uint8_t bl_order[BL_CODES] ALIGN1 = {
	16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
791 | |||
#define STORED_BLOCK 0
#define STATIC_TREES 1
#define DYN_TREES 2
/* The three kinds of block type */

#ifndef LIT_BUFSIZE
# ifdef SMALL_MEM
#  define LIT_BUFSIZE 0x2000
# else
#  ifdef MEDIUM_MEM
#   define LIT_BUFSIZE 0x4000
#  else
#   define LIT_BUFSIZE 0x8000
#  endif
# endif
#endif
#ifndef DIST_BUFSIZE
# define DIST_BUFSIZE LIT_BUFSIZE
#endif
/* Sizes of match buffers for literals/lengths and distances. There are
 * 4 reasons for limiting LIT_BUFSIZE to 64K:
 * - frequencies can be kept in 16 bit counters
 * - if compression is not successful for the first block, all input data is
 *   still in the window so we can still emit a stored block even when input
 *   comes from standard input. (This can also be done for all blocks if
 *   LIT_BUFSIZE is not greater than 32K.)
 * - if compression is not successful for a file smaller than 64K, we can
 *   even emit a stored file instead of a stored block (saving 5 bytes).
 * - creating new Huffman trees less frequently may not provide fast
 *   adaptation to changes in the input data statistics. (Take for
 *   example a binary file with poorly compressible code followed by
 *   a highly compressible string table.) Smaller buffer sizes give
 *   fast adaptation but have of course the overhead of transmitting trees
 *   more frequently.
 * - I can't count above 4
 * The current code is general and allows DIST_BUFSIZE < LIT_BUFSIZE (to save
 * memory at the expense of compression). Some optimizations would be possible
 * if we rely on DIST_BUFSIZE == LIT_BUFSIZE.
 */

/* Special "codes" of the bit length alphabet (values 16..18) that encode
 * run lengths rather than literal bit lengths: */
#define REP_3_6 16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */
#define REPZ_3_10 17
/* repeat a zero length 3-10 times (3 bits of repeat count) */
#define REPZ_11_138 18
/* repeat a zero length 11-138 times (7 bits of repeat count) */
837 | |||
838 | /* =========================================================================== | ||
839 | */ | ||
/* Data structure describing a single value and its code string.
 * The two unions save space: freq/dad are used while building a tree,
 * code/len hold the finished Huffman code afterwards.
 */
typedef struct ct_data {
	union {
		ush freq; /* frequency count */
		ush code; /* bit string */
	} fc;
	union {
		ush dad; /* father node in Huffman tree */
		ush len; /* length of bit string */
	} dl;
} ct_data;

/* Shorthand accessors for the union members above */
#define Freq fc.freq
#define Code fc.code
#define Dad dl.dad
#define Len dl.len

#define HEAP_SIZE (2*L_CODES + 1)
/* maximum heap size */
859 | |||
/* Bundles one Huffman tree with everything build_tree() needs to
 * construct it: its optional static counterpart, the extra-bits table,
 * and the size/length limits.
 */
typedef struct tree_desc {
	ct_data *dyn_tree; /* the dynamic tree */
	ct_data *static_tree; /* corresponding static tree or NULL */
	const uint8_t *extra_bits; /* extra bits for each code or NULL */
	int extra_base; /* base index for extra_bits */
	int elems; /* max number of elements in the tree */
	int max_length; /* max bit length for the codes */
	int max_code; /* largest code with non zero frequency */
} tree_desc;
869 | niro | 532 | |
/* All formerly-bss state of the tree coder, gathered into one struct
 * reached through bb_common_bufsiz1's ptr_to_globals (busybox idiom).
 */
struct globals2 {

	ush heap[HEAP_SIZE]; /* heap used to build the Huffman trees */
	int heap_len; /* number of elements in the heap */
	int heap_max; /* element of largest frequency */

	/* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
	 * The same heap array is used to build all trees.
	 */

	ct_data dyn_ltree[HEAP_SIZE]; /* literal and length tree */
	ct_data dyn_dtree[2 * D_CODES + 1]; /* distance tree */

	ct_data static_ltree[L_CODES + 2];

	/* The static literal tree. Since the bit lengths are imposed, there is no
	 * need for the L_CODES extra codes used during heap construction. However
	 * The codes 286 and 287 are needed to build a canonical tree (see ct_init
	 * below).
	 */

	ct_data static_dtree[D_CODES];

	/* The static distance tree. (Actually a trivial tree since all codes use
	 * 5 bits.)
	 */

	ct_data bl_tree[2 * BL_CODES + 1];

	/* Huffman tree for the bit lengths */

	tree_desc l_desc;
	tree_desc d_desc;
	tree_desc bl_desc;

	ush bl_count[MAX_BITS + 1];

	/* number of codes at each bit length for an optimal tree
	 * (filled in by gen_bitlen, consumed by gen_codes)
	 */

	uch depth[2 * L_CODES + 1];

	/* Depth of each subtree used as tie breaker for trees of equal frequency */

	uch length_code[MAX_MATCH - MIN_MATCH + 1];

	/* length code for each normalized match length (0 == MIN_MATCH) */

	uch dist_code[512];

	/* distance codes. The first 256 values correspond to the distances
	 * 3 .. 258, the last 256 values correspond to the top 8 bits of
	 * the 15 bit distances.
	 */

	int base_length[LENGTH_CODES];

	/* First normalized length for each code (0 = MIN_MATCH) */

	int base_dist[D_CODES];

	/* First normalized distance for each code (0 = distance of 1) */

	uch flag_buf[LIT_BUFSIZE / 8];

	/* flag_buf is a bit array distinguishing literals from lengths in
	 * l_buf, thus indicating the presence or absence of a distance.
	 */

	unsigned last_lit; /* running index in l_buf */
	unsigned last_dist; /* running index in d_buf */
	unsigned last_flags; /* running index in flag_buf */
	uch flags; /* current flags not yet saved in flag_buf */
	uch flag_bit; /* current bit used in flags */

	/* bits are filled in flags starting at bit 0 (least significant).
	 * Note: these flags are overkill in the current code since we don't
	 * take advantage of DIST_BUFSIZE == LIT_BUFSIZE.
	 */

	ulg opt_len; /* bit length of current block with optimal trees */
	ulg static_len; /* bit length of current block with static trees */

	ulg compressed_len; /* total bit length of compressed file */
};

#define G2ptr ((struct globals2*)(ptr_to_globals))
#define G2 (*G2ptr)
959 | niro | 532 | |
960 | niro | 816 | |
961 | niro | 532 | /* =========================================================================== |
962 | */ | ||
/* Forward declarations for the tree coder below */
static void gen_codes(ct_data * tree, int max_code);
static void build_tree(tree_desc * desc);
static void scan_tree(ct_data * tree, int max_code);
static void send_tree(ct_data * tree, int max_code);
static int build_bl_tree(void);
static void send_all_trees(int lcodes, int dcodes, int blcodes);
static void compress_block(ct_data * ltree, ct_data * dtree);


#ifndef DEBUG
/* Send a code of the given tree. c and tree must not have side effects */
# define SEND_CODE(c, tree) send_bits(tree[c].Code, tree[c].Len)
#else
# define SEND_CODE(c, tree) \
{ \
	if (verbose > 1) bb_error_msg("\ncd %3d ",(c)); \
	send_bits(tree[c].Code, tree[c].Len); \
}
#endif

#define D_CODE(dist) \
	((dist) < 256 ? G2.dist_code[dist] : G2.dist_code[256 + ((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. dist_code[256] and dist_code[257] are never
 * used.
 * The arguments must not have side effects.
 */
990 | |||
991 | |||
992 | /* =========================================================================== | ||
993 | * Initialize a new block. | ||
994 | */ | ||
995 | static void init_block(void) | ||
996 | { | ||
997 | int n; /* iterates over tree elements */ | ||
998 | |||
999 | /* Initialize the trees. */ | ||
1000 | for (n = 0; n < L_CODES; n++) | ||
1001 | niro | 816 | G2.dyn_ltree[n].Freq = 0; |
1002 | niro | 532 | for (n = 0; n < D_CODES; n++) |
1003 | niro | 816 | G2.dyn_dtree[n].Freq = 0; |
1004 | niro | 532 | for (n = 0; n < BL_CODES; n++) |
1005 | niro | 816 | G2.bl_tree[n].Freq = 0; |
1006 | niro | 532 | |
1007 | niro | 816 | G2.dyn_ltree[END_BLOCK].Freq = 1; |
1008 | G2.opt_len = G2.static_len = 0; | ||
1009 | G2.last_lit = G2.last_dist = G2.last_flags = 0; | ||
1010 | G2.flags = 0; | ||
1011 | G2.flag_bit = 1; | ||
1012 | niro | 532 | } |
1013 | |||
1014 | |||
1015 | /* =========================================================================== | ||
1016 | * Restore the heap property by moving down the tree starting at node k, | ||
1017 | * exchanging a node with the smallest of its two sons if necessary, stopping | ||
1018 | * when the heap property is re-established (each father smaller than its | ||
1019 | * two sons). | ||
1020 | */ | ||
1021 | |||
/* Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length. */
#define SMALLER(tree, n, m) \
	(tree[n].Freq < tree[m].Freq \
	|| (tree[n].Freq == tree[m].Freq && G2.depth[n] <= G2.depth[m]))

static void pqdownheap(ct_data * tree, int k)
{
	int v = G2.heap[k]; /* node being sifted down */
	int j = k << 1; /* left son of k */

	while (j <= G2.heap_len) {
		/* Set j to the smallest of the two sons: */
		if (j < G2.heap_len && SMALLER(tree, G2.heap[j + 1], G2.heap[j]))
			j++;

		/* Exit if v is smaller than both sons */
		if (SMALLER(tree, v, G2.heap[j]))
			break;

		/* Exchange v with the smallest son */
		G2.heap[k] = G2.heap[j];
		k = j;

		/* And continue down the tree, setting j to the left son of k */
		j <<= 1;
	}
	/* v has found its final position */
	G2.heap[k] = v;
}
1051 | |||
1052 | |||
1053 | /* =========================================================================== | ||
1054 | * Compute the optimal bit lengths for a tree and update the total bit length | ||
1055 | * for the current block. | ||
1056 | * IN assertion: the fields freq and dad are set, heap[heap_max] and | ||
1057 | * above are the tree nodes sorted by increasing frequency. | ||
1058 | * OUT assertions: the field len is set to the optimal bit length, the | ||
1059 | * array bl_count contains the frequencies for each bit length. | ||
1060 | * The length opt_len is updated; static_len is also updated if stree is | ||
1061 | * not null. | ||
1062 | */ | ||
static void gen_bitlen(tree_desc * desc)
{
	ct_data *tree = desc->dyn_tree;
	const uint8_t *extra = desc->extra_bits;
	int base = desc->extra_base;
	int max_code = desc->max_code;
	int max_length = desc->max_length;
	ct_data *stree = desc->static_tree;
	int h; /* heap index */
	int n, m; /* iterate over the tree elements */
	int bits; /* bit length */
	int xbits; /* extra bits */
	ush f; /* frequency */
	int overflow = 0; /* number of elements with bit length too large */

	for (bits = 0; bits <= MAX_BITS; bits++)
		G2.bl_count[bits] = 0;

	/* In a first pass, compute the optimal bit lengths (which may
	 * overflow in the case of the bit length tree).
	 */
	tree[G2.heap[G2.heap_max]].Len = 0; /* root of the heap */

	/* Walk the heap from the root down; a node's depth is its father's
	 * depth plus one, clamped to max_length (clamping is what can make
	 * the code set invalid and is repaired below).
	 */
	for (h = G2.heap_max + 1; h < HEAP_SIZE; h++) {
		n = G2.heap[h];
		bits = tree[tree[n].Dad].Len + 1;
		if (bits > max_length) {
			bits = max_length;
			overflow++;
		}
		tree[n].Len = (ush) bits;
		/* We overwrite tree[n].Dad which is no longer needed */

		if (n > max_code)
			continue; /* not a leaf node */

		G2.bl_count[bits]++;
		xbits = 0;
		if (n >= base)
			xbits = extra[n - base];
		f = tree[n].Freq;
		G2.opt_len += (ulg) f *(bits + xbits);

		if (stree)
			G2.static_len += (ulg) f * (stree[n].Len + xbits);
	}
	if (overflow == 0)
		return;

	Trace((stderr, "\nbit length overflow\n"));
	/* This happens for example on obj2 and pic of the Calgary corpus */

	/* Find the first bit length which could increase: */
	do {
		bits = max_length - 1;
		while (G2.bl_count[bits] == 0)
			bits--;
		G2.bl_count[bits]--; /* move one leaf down the tree */
		G2.bl_count[bits + 1] += 2; /* move one overflow item as its brother */
		G2.bl_count[max_length]--;
		/* The brother of the overflow item also moves one step up,
		 * but this does not affect bl_count[max_length]
		 */
		overflow -= 2;
	} while (overflow > 0);

	/* Now recompute all bit lengths, scanning in increasing frequency.
	 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
	 * lengths instead of fixing only the wrong ones. This idea is taken
	 * from 'ar' written by Haruhiko Okumura.)
	 */
	for (bits = max_length; bits != 0; bits--) {
		n = G2.bl_count[bits];
		while (n != 0) {
			m = G2.heap[--h];
			if (m > max_code)
				continue;
			if (tree[m].Len != (unsigned) bits) {
				Trace((stderr, "code %d bits %d->%d\n", m, tree[m].Len, bits));
				/* Adjust opt_len by the signed difference in code length */
				G2.opt_len += ((int32_t) bits - tree[m].Len) * tree[m].Freq;
				tree[m].Len = bits;
			}
			n--;
		}
	}
}
1149 | |||
1150 | |||
1151 | /* =========================================================================== | ||
1152 | * Generate the codes for a given tree and bit counts (which need not be | ||
1153 | * optimal). | ||
1154 | * IN assertion: the array bl_count contains the bit length statistics for | ||
1155 | * the given tree and the field len is set for all tree elements. | ||
1156 | * OUT assertion: the field code is set for all tree elements of non | ||
1157 | * zero code length. | ||
1158 | */ | ||
1159 | static void gen_codes(ct_data * tree, int max_code) | ||
1160 | { | ||
1161 | ush next_code[MAX_BITS + 1]; /* next code value for each bit length */ | ||
1162 | ush code = 0; /* running code value */ | ||
1163 | int bits; /* bit index */ | ||
1164 | int n; /* code index */ | ||
1165 | |||
1166 | /* The distribution counts are first used to generate the code values | ||
1167 | * without bit reversal. | ||
1168 | */ | ||
1169 | for (bits = 1; bits <= MAX_BITS; bits++) { | ||
1170 | niro | 816 | next_code[bits] = code = (code + G2.bl_count[bits - 1]) << 1; |
1171 | niro | 532 | } |
1172 | /* Check that the bit counts in bl_count are consistent. The last code | ||
1173 | * must be all ones. | ||
1174 | */ | ||
1175 | niro | 816 | Assert(code + G2.bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1, |
1176 | niro | 532 | "inconsistent bit counts"); |
1177 | Tracev((stderr, "\ngen_codes: max_code %d ", max_code)); | ||
1178 | |||
1179 | for (n = 0; n <= max_code; n++) { | ||
1180 | int len = tree[n].Len; | ||
1181 | |||
1182 | if (len == 0) | ||
1183 | continue; | ||
1184 | /* Now reverse the bits */ | ||
1185 | tree[n].Code = bi_reverse(next_code[len]++, len); | ||
1186 | |||
1187 | niro | 816 | Tracec(tree != G2.static_ltree, |
1188 | niro | 532 | (stderr, "\nn %3d %c l %2d c %4x (%x) ", n, |
1189 | (isgraph(n) ? n : ' '), len, tree[n].Code, | ||
1190 | next_code[len] - 1)); | ||
1191 | } | ||
1192 | } | ||
1193 | |||
1194 | |||
1195 | /* =========================================================================== | ||
1196 | * Construct one Huffman tree and assigns the code bit strings and lengths. | ||
1197 | * Update the total bit length for the current block. | ||
1198 | * IN assertion: the field freq is set for all tree elements. | ||
1199 | * OUT assertions: the fields len and code are set to the optimal bit length | ||
1200 | * and corresponding code. The length opt_len is updated; static_len is | ||
1201 | * also updated if stree is not null. The field max_code is set. | ||
1202 | */ | ||
1203 | |||
1204 | /* Remove the smallest element from the heap and recreate the heap with | ||
1205 | * one less element. Updates heap and heap_len. */ | ||
1206 | |||
#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */

/* Remove the smallest element from the heap and restore the heap
 * invariant with one less element. Updates heap and heap_len. */
#define PQREMOVE(tree, top) \
do { \
	top = G2.heap[SMALLEST]; \
	G2.heap[SMALLEST] = G2.heap[G2.heap_len--]; \
	pqdownheap(tree, SMALLEST); \
} while (0)

static void build_tree(tree_desc * desc)
{
	ct_data *tree = desc->dyn_tree;
	ct_data *stree = desc->static_tree;
	int elems = desc->elems;
	int n, m; /* iterate over heap elements */
	int max_code = -1; /* largest code with non zero frequency */
	int node = elems; /* next internal node of the tree */

	/* Construct the initial heap, with least frequent element in
	 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
	 * heap[0] is not used.
	 */
	G2.heap_len = 0;
	G2.heap_max = HEAP_SIZE;

	for (n = 0; n < elems; n++) {
		if (tree[n].Freq != 0) {
			G2.heap[++G2.heap_len] = max_code = n;
			G2.depth[n] = 0;
		} else {
			tree[n].Len = 0;
		}
	}

	/* The pkzip format requires that at least one distance code exists,
	 * and that at least one bit should be sent even if there is only one
	 * possible code. So to avoid special checks later on we force at least
	 * two codes of non zero frequency.
	 */
	while (G2.heap_len < 2) {
		int new = G2.heap[++G2.heap_len] = (max_code < 2 ? ++max_code : 0);

		tree[new].Freq = 1;
		G2.depth[new] = 0;
		G2.opt_len--;
		if (stree)
			G2.static_len -= stree[new].Len;
		/* new is 0 or 1 so it does not have extra bits */
	}
	desc->max_code = max_code;

	/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
	 * establish sub-heaps of increasing lengths:
	 */
	for (n = G2.heap_len / 2; n >= 1; n--)
		pqdownheap(tree, n);

	/* Construct the Huffman tree by repeatedly combining the least two
	 * frequent nodes.
	 */
	do {
		PQREMOVE(tree, n); /* n = node of least frequency */
		m = G2.heap[SMALLEST]; /* m = node of next least frequency */

		G2.heap[--G2.heap_max] = n; /* keep the nodes sorted by frequency */
		G2.heap[--G2.heap_max] = m;

		/* Create a new node father of n and m */
		tree[node].Freq = tree[n].Freq + tree[m].Freq;
		G2.depth[node] = MAX(G2.depth[n], G2.depth[m]) + 1;
		tree[n].Dad = tree[m].Dad = (ush) node;
#ifdef DUMP_BL_TREE
		if (tree == G2.bl_tree) {
			bb_error_msg("\nnode %d(%d), sons %d(%d) %d(%d)",
					node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
		}
#endif
		/* and insert the new node in the heap */
		G2.heap[SMALLEST] = node++;
		pqdownheap(tree, SMALLEST);

	} while (G2.heap_len >= 2);

	G2.heap[--G2.heap_max] = G2.heap[SMALLEST];

	/* At this point, the fields freq and dad are set. We can now
	 * generate the bit lengths.
	 */
	gen_bitlen((tree_desc *) desc);

	/* The field len is now set, we can generate the bit codes */
	gen_codes((ct_data *) tree, max_code);
}
1301 | |||
1302 | |||
1303 | /* =========================================================================== | ||
1304 | * Scan a literal or distance tree to determine the frequencies of the codes | ||
1305 | * in the bit length tree. Updates opt_len to take into account the repeat | ||
1306 | * counts. (The contribution of the bit length codes will be added later | ||
1307 | * during the construction of bl_tree.) | ||
1308 | */ | ||
static void scan_tree(ct_data * tree, int max_code)
{
	int n; /* iterates over all tree elements */
	int prevlen = -1; /* last emitted length */
	int curlen; /* length of current code */
	int nextlen = tree[0].Len; /* length of next code */
	int count = 0; /* repeat count of the current code */
	int max_count = 7; /* max repeat count */
	int min_count = 4; /* min repeat count */

	if (nextlen == 0) {
		/* a run of zeros can be RLE-coded up to 138 long (REPZ_11_138) */
		max_count = 138;
		min_count = 3;
	}
	tree[max_code + 1].Len = 0xffff; /* guard */

	for (n = 0; n <= max_code; n++) {
		curlen = nextlen;
		nextlen = tree[n + 1].Len;
		/* extend the current run while it stays within the RLE limit */
		if (++count < max_count && curlen == nextlen)
			continue;

		if (count < min_count) {
			/* run too short to RLE-code: count each length individually */
			G2.bl_tree[curlen].Freq += count;
		} else if (curlen != 0) {
			if (curlen != prevlen)
				G2.bl_tree[curlen].Freq++;
			G2.bl_tree[REP_3_6].Freq++;
		} else if (count <= 10) {
			G2.bl_tree[REPZ_3_10].Freq++;
		} else {
			G2.bl_tree[REPZ_11_138].Freq++;
		}
		count = 0;
		prevlen = curlen;

		/* pick the RLE limits for the next run (same order as send_tree) */
		max_count = 7;
		min_count = 4;
		if (nextlen == 0) {
			max_count = 138;
			min_count = 3;
		} else if (curlen == nextlen) {
			max_count = 6;
			min_count = 3;
		}
	}
}
1356 | |||
1357 | |||
1358 | /* =========================================================================== | ||
1359 | * Send a literal or distance tree in compressed form, using the codes in | ||
1360 | * bl_tree. | ||
1361 | */ | ||
static void send_tree(ct_data * tree, int max_code)
{
	int n; /* iterates over all tree elements */
	int prevlen = -1; /* last emitted length */
	int curlen; /* length of current code */
	int nextlen = tree[0].Len; /* length of next code */
	int count = 0; /* repeat count of the current code */
	int max_count = 7; /* max repeat count */
	int min_count = 4; /* min repeat count */

	/* tree[max_code+1].Len = -1; *//* guard already set */
	if (nextlen == 0)
		max_count = 138, min_count = 3;

	/* Mirrors scan_tree exactly: same run detection, but emitting codes
	 * instead of counting frequencies. */
	for (n = 0; n <= max_code; n++) {
		curlen = nextlen;
		nextlen = tree[n + 1].Len;
		if (++count < max_count && curlen == nextlen) {
			continue;
		} else if (count < min_count) {
			/* run too short to RLE-code: send each length individually */
			do {
				SEND_CODE(curlen, G2.bl_tree);
			} while (--count);
		} else if (curlen != 0) {
			if (curlen != prevlen) {
				SEND_CODE(curlen, G2.bl_tree);
				count--;
			}
			Assert(count >= 3 && count <= 6, " 3_6?");
			SEND_CODE(REP_3_6, G2.bl_tree);
			send_bits(count - 3, 2);
		} else if (count <= 10) {
			SEND_CODE(REPZ_3_10, G2.bl_tree);
			send_bits(count - 3, 3);
		} else {
			SEND_CODE(REPZ_11_138, G2.bl_tree);
			send_bits(count - 11, 7);
		}
		count = 0;
		prevlen = curlen;
		if (nextlen == 0) {
			max_count = 138;
			min_count = 3;
		} else if (curlen == nextlen) {
			max_count = 6;
			min_count = 3;
		} else {
			max_count = 7;
			min_count = 4;
		}
	}
}
1414 | |||
1415 | |||
1416 | /* =========================================================================== | ||
1417 | * Construct the Huffman tree for the bit lengths and return the index in | ||
1418 | * bl_order of the last bit length code to send. | ||
1419 | */ | ||
1420 | static int build_bl_tree(void) | ||
1421 | { | ||
1422 | int max_blindex; /* index of last bit length code of non zero freq */ | ||
1423 | |||
1424 | /* Determine the bit length frequencies for literal and distance trees */ | ||
1425 | niro | 816 | scan_tree(G2.dyn_ltree, G2.l_desc.max_code); |
1426 | scan_tree(G2.dyn_dtree, G2.d_desc.max_code); | ||
1427 | niro | 532 | |
1428 | /* Build the bit length tree: */ | ||
1429 | niro | 816 | build_tree(&G2.bl_desc); |
1430 | niro | 532 | /* opt_len now includes the length of the tree representations, except |
1431 | * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. | ||
1432 | */ | ||
1433 | |||
1434 | /* Determine the number of bit length codes to send. The pkzip format | ||
1435 | * requires that at least 4 bit length codes be sent. (appnote.txt says | ||
1436 | * 3 but the actual value used is 4.) | ||
1437 | */ | ||
1438 | for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { | ||
1439 | niro | 816 | if (G2.bl_tree[bl_order[max_blindex]].Len != 0) |
1440 | niro | 532 | break; |
1441 | } | ||
1442 | /* Update opt_len to include the bit length tree and counts */ | ||
1443 | niro | 816 | G2.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; |
1444 | Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", G2.opt_len, G2.static_len)); | ||
1445 | niro | 532 | |
1446 | return max_blindex; | ||
1447 | } | ||
1448 | |||
1449 | |||
1450 | /* =========================================================================== | ||
1451 | * Send the header for a block using dynamic Huffman trees: the counts, the | ||
1452 | * lengths of the bit length codes, the literal tree and the distance tree. | ||
1453 | * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. | ||
1454 | */ | ||
static void send_all_trees(int lcodes, int dcodes, int blcodes)
{
	int rank; /* index in bl_order */

	Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
	Assert(lcodes <= L_CODES && dcodes <= D_CODES
	    && blcodes <= BL_CODES, "too many codes");
	Tracev((stderr, "\nbl counts: "));
	/* Header counts: HLIT, HDIST, HCLEN */
	send_bits(lcodes - 257, 5); /* not +255 as stated in appnote.txt */
	send_bits(dcodes - 1, 5);
	send_bits(blcodes - 4, 4); /* not -3 as stated in appnote.txt */
	/* The bit length code lengths themselves, 3 bits each, in bl_order */
	for (rank = 0; rank < blcodes; rank++) {
		Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
		send_bits(G2.bl_tree[bl_order[rank]].Len, 3);
	}
	Tracev((stderr, "\nbl tree: sent %ld", G1.bits_sent));

	send_tree((ct_data *) G2.dyn_ltree, lcodes - 1); /* send the literal tree */
	Tracev((stderr, "\nlit tree: sent %ld", G1.bits_sent));

	send_tree((ct_data *) G2.dyn_dtree, dcodes - 1); /* send the distance tree */
	Tracev((stderr, "\ndist tree: sent %ld", G1.bits_sent));
}
1478 | |||
1479 | |||
1480 | /* =========================================================================== | ||
1481 | * Save the match info and tally the frequency counts. Return true if | ||
1482 | * the current block must be flushed. | ||
1483 | */ | ||
static int ct_tally(int dist, int lc)
{
	G1.l_buf[G2.last_lit++] = lc;
	if (dist == 0) {
		/* lc is the unmatched char */
		G2.dyn_ltree[lc].Freq++;
	} else {
		/* Here, lc is the match length - MIN_MATCH */
		dist--; /* dist = match distance - 1 */
		Assert((ush) dist < (ush) MAX_DIST
		 && (ush) lc <= (ush) (MAX_MATCH - MIN_MATCH)
		 && (ush) D_CODE(dist) < (ush) D_CODES, "ct_tally: bad match"
		);

		G2.dyn_ltree[G2.length_code[lc] + LITERALS + 1].Freq++;
		G2.dyn_dtree[D_CODE(dist)].Freq++;

		G1.d_buf[G2.last_dist++] = dist;
		/* mark this position as a match in the flag bit-array */
		G2.flags |= G2.flag_bit;
	}
	G2.flag_bit <<= 1;

	/* Output the flags if they fill a byte: */
	if ((G2.last_lit & 7) == 0) {
		G2.flag_buf[G2.last_flags++] = G2.flags;
		G2.flags = 0;
		G2.flag_bit = 1;
	}
	/* Try to guess if it is profitable to stop the current block here */
	if ((G2.last_lit & 0xfff) == 0) {
		/* Compute an upper bound for the compressed length */
		ulg out_length = G2.last_lit * 8L;
		ulg in_length = (ulg) G1.strstart - G1.block_start;
		int dcode;

		for (dcode = 0; dcode < D_CODES; dcode++) {
			out_length += G2.dyn_dtree[dcode].Freq * (5L + extra_dbits[dcode]);
		}
		out_length >>= 3;
		Trace((stderr,
			"\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
			G2.last_lit, G2.last_dist, in_length, out_length,
			100L - out_length * 100L / in_length));
		if (G2.last_dist < G2.last_lit / 2 && out_length < in_length / 2)
			return 1;
	}
	return (G2.last_lit == LIT_BUFSIZE - 1 || G2.last_dist == DIST_BUFSIZE);
	/* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
	 * on 16 bit machines and because stored blocks are restricted to
	 * 64K-1 bytes.
	 */
}
1536 | |||
/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 *
 * ltree: literal/length tree, dtree: distance tree.
 * Replays the literal/match stream recorded by ct_tally(): l_buf holds
 * one byte per event (literal or length), d_buf holds one distance per
 * match, and flag_buf tells the two apart, one bit per event (LSB first).
 * Terminates the block with END_BLOCK.
 */
static void compress_block(ct_data * ltree, ct_data * dtree)
{
	unsigned dist;		/* distance of matched string */
	int lc;			/* match length or unmatched char (if dist == 0) */
	unsigned lx = 0;	/* running index in l_buf */
	unsigned dx = 0;	/* running index in d_buf */
	unsigned fx = 0;	/* running index in flag_buf */
	uch flag = 0;		/* current flags */
	unsigned code;		/* the code to send */
	int extra;		/* number of extra bits to send */

	if (G2.last_lit != 0) do {
		/* reload the literal/match flag byte every 8 events */
		if ((lx & 7) == 0)
			flag = G2.flag_buf[fx++];
		lc = G1.l_buf[lx++];
		if ((flag & 1) == 0) {
			SEND_CODE(lc, ltree);	/* send a literal byte */
			Tracecv(isgraph(lc), (stderr, " '%c' ", lc));
		} else {
			/* Here, lc is the match length - MIN_MATCH */
			code = G2.length_code[lc];
			SEND_CODE(code + LITERALS + 1, ltree);	/* send the length code */
			extra = extra_lbits[code];
			if (extra != 0) {
				lc -= G2.base_length[code];
				send_bits(lc, extra);	/* send the extra length bits */
			}
			dist = G1.d_buf[dx++];
			/* Here, dist is the match distance - 1 */
			code = D_CODE(dist);
			Assert(code < D_CODES, "bad d_code");

			SEND_CODE(code, dtree);	/* send the distance code */
			extra = extra_dbits[code];
			if (extra != 0) {
				dist -= G2.base_dist[code];
				send_bits(dist, extra);	/* send the extra distance bits */
			}
		}		/* literal or match pair ? */
		flag >>= 1;
	} while (lx < G2.last_lit);

	SEND_CODE(END_BLOCK, ltree);
}
1584 | |||
1585 | |||
/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file. This function
 * returns the total compressed length for the file so far.
 *
 * buf:        window slice holding the block's input data, or NULL if the
 *             data is no longer available (then "store" is not an option)
 * stored_len: length of that input data in bytes
 * eof:        true if this is the last block of the file
 */
static ulg flush_block(char *buf, ulg stored_len, int eof)
{
	ulg opt_lenb, static_lenb;	/* opt_len and static_len in bytes */
	int max_blindex;	/* index of last bit length code of non zero freq */

	G2.flag_buf[G2.last_flags] = G2.flags; /* Save the flags for the last 8 items */

	/* Construct the literal and distance trees */
	build_tree(&G2.l_desc);
	Tracev((stderr, "\nlit data: dyn %ld, stat %ld", G2.opt_len, G2.static_len));

	build_tree(&G2.d_desc);
	Tracev((stderr, "\ndist data: dyn %ld, stat %ld", G2.opt_len, G2.static_len));
	/* At this point, opt_len and static_len are the total bit lengths of
	 * the compressed block data, excluding the tree representations.
	 */

	/* Build the bit length tree for the above two trees, and get the index
	 * in bl_order of the last bit length code to send.
	 */
	max_blindex = build_bl_tree();

	/* Determine the best encoding. Compute first the block length in bytes */
	/* +3 for the 3-bit block header, +7 to round up to a whole byte */
	opt_lenb = (G2.opt_len + 3 + 7) >> 3;
	static_lenb = (G2.static_len + 3 + 7) >> 3;

	Trace((stderr,
		   "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ",
		   opt_lenb, G2.opt_len, static_lenb, G2.static_len, stored_len,
		   G2.last_lit, G2.last_dist));

	if (static_lenb <= opt_lenb)
		opt_lenb = static_lenb;

	/* If compression failed and this is the first and last block,
	 * and if the zip file can be seeked (to rewrite the local header),
	 * the whole file is transformed into a stored file:
	 */
	if (stored_len <= opt_lenb && eof && G2.compressed_len == 0L && seekable()) {
		/* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
		/* NOTE(review): bb_error_msg() does not exit; if buf were NULL,
		 * copy_block() below would dereference it. The comment above
		 * argues this state is unreachable — verify before relying on it. */
		if (buf == NULL)
			bb_error_msg("block vanished");

		copy_block(buf, (unsigned) stored_len, 0);	/* without header */
		G2.compressed_len = stored_len << 3;

	} else if (stored_len + 4 <= opt_lenb && buf != NULL) {
		/* 4: two words for the lengths */
		/* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
		 * Otherwise we can't have processed more than WSIZE input bytes since
		 * the last block flush, because compression would have been
		 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
		 * transform a block into a stored block.
		 */
		send_bits((STORED_BLOCK << 1) + eof, 3);	/* send block type */
		G2.compressed_len = (G2.compressed_len + 3 + 7) & ~7L;	/* byte-align */
		G2.compressed_len += (stored_len + 4) << 3;

		copy_block(buf, (unsigned) stored_len, 1);	/* with header */

	} else if (static_lenb == opt_lenb) {
		send_bits((STATIC_TREES << 1) + eof, 3);
		compress_block((ct_data *) G2.static_ltree, (ct_data *) G2.static_dtree);
		G2.compressed_len += 3 + G2.static_len;
	} else {
		send_bits((DYN_TREES << 1) + eof, 3);
		send_all_trees(G2.l_desc.max_code + 1, G2.d_desc.max_code + 1,
					   max_blindex + 1);
		compress_block((ct_data *) G2.dyn_ltree, (ct_data *) G2.dyn_dtree);
		G2.compressed_len += 3 + G2.opt_len;
	}
	Assert(G2.compressed_len == G1.bits_sent, "bad compressed size");
	init_block();

	if (eof) {
		bi_windup();
		G2.compressed_len += 7;	/* align on byte boundary */
	}
	Tracev((stderr, "\ncomprlen %lu(%lu) ", G2.compressed_len >> 3,
			G2.compressed_len - 7 * eof));

	return G2.compressed_len >> 3;
}
1674 | |||
1675 | |||
/* ===========================================================================
 * Update a hash value with the given input byte.
 * IN assertion: all calls to UPDATE_HASH are made with consecutive
 * input characters, so that a running hash key can be computed from the
 * previous key instead of complete recalculation each time.
 * (h is shifted, XORed with the new byte c, and masked back into range.)
 */
#define UPDATE_HASH(h, c) (h = (((h)<<H_SHIFT) ^ (c)) & HASH_MASK)
1683 | |||
1684 | |||
/* ===========================================================================
 * Same as above, but achieves better compression. We use a lazy
 * evaluation for matches: a match is finally adopted only if there is
 * no better match at the next window position.
 *
 * Processes a new input file and return its compressed length. Sets
 * the compressed length, crc, deflate flags and internal file
 * attributes.
 */

/* Flush the current block, with given end-of-file flag.
 * IN assertion: strstart is set to the end of the current match.
 * Passes NULL for the data pointer when the block's input has already
 * slid out of the window (block_start < 0). */
#define FLUSH_BLOCK(eof) \
	flush_block( \
		G1.block_start >= 0L \
			? (char*)&G1.window[(unsigned)G1.block_start] \
			: (char*)NULL, \
		(ulg)G1.strstart - G1.block_start, \
		(eof) \
	)

/* Insert string s in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * IN assertion: all calls to INSERT_STRING are made with consecutive
 * input characters and the first MIN_MATCH bytes of s are valid
 * (except for the last MIN_MATCH-1 bytes of the input file). */
#define INSERT_STRING(s, match_head) \
do { \
	UPDATE_HASH(G1.ins_h, G1.window[(s) + MIN_MATCH-1]); \
	G1.prev[(s) & WMASK] = match_head = head[G1.ins_h]; \
	head[G1.ins_h] = (s); \
} while (0)
1718 | niro | 532 | |
1719 | static ulg deflate(void) | ||
1720 | { | ||
1721 | IPos hash_head; /* head of hash chain */ | ||
1722 | IPos prev_match; /* previous match */ | ||
1723 | int flush; /* set if current block must be flushed */ | ||
1724 | int match_available = 0; /* set if previous match exists */ | ||
1725 | unsigned match_length = MIN_MATCH - 1; /* length of best match */ | ||
1726 | |||
1727 | /* Process the input block. */ | ||
1728 | niro | 816 | while (G1.lookahead != 0) { |
1729 | niro | 532 | /* Insert the string window[strstart .. strstart+2] in the |
1730 | * dictionary, and set hash_head to the head of the hash chain: | ||
1731 | */ | ||
1732 | niro | 816 | INSERT_STRING(G1.strstart, hash_head); |
1733 | niro | 532 | |
1734 | /* Find the longest match, discarding those <= prev_length. | ||
1735 | */ | ||
1736 | niro | 816 | G1.prev_length = match_length; |
1737 | prev_match = G1.match_start; | ||
1738 | niro | 532 | match_length = MIN_MATCH - 1; |
1739 | |||
1740 | niro | 816 | if (hash_head != 0 && G1.prev_length < max_lazy_match |
1741 | && G1.strstart - hash_head <= MAX_DIST | ||
1742 | niro | 532 | ) { |
1743 | /* To simplify the code, we prevent matches with the string | ||
1744 | * of window index 0 (in particular we have to avoid a match | ||
1745 | * of the string with itself at the start of the input file). | ||
1746 | */ | ||
1747 | match_length = longest_match(hash_head); | ||
1748 | /* longest_match() sets match_start */ | ||
1749 | niro | 816 | if (match_length > G1.lookahead) |
1750 | match_length = G1.lookahead; | ||
1751 | niro | 532 | |
1752 | /* Ignore a length 3 match if it is too distant: */ | ||
1753 | niro | 816 | if (match_length == MIN_MATCH && G1.strstart - G1.match_start > TOO_FAR) { |
1754 | /* If prev_match is also MIN_MATCH, G1.match_start is garbage | ||
1755 | niro | 532 | * but we will ignore the current match anyway. |
1756 | */ | ||
1757 | match_length--; | ||
1758 | } | ||
1759 | } | ||
1760 | /* If there was a match at the previous step and the current | ||
1761 | * match is not better, output the previous match: | ||
1762 | */ | ||
1763 | niro | 816 | if (G1.prev_length >= MIN_MATCH && match_length <= G1.prev_length) { |
1764 | check_match(G1.strstart - 1, prev_match, G1.prev_length); | ||
1765 | flush = ct_tally(G1.strstart - 1 - prev_match, G1.prev_length - MIN_MATCH); | ||
1766 | niro | 532 | |
1767 | /* Insert in hash table all strings up to the end of the match. | ||
1768 | * strstart-1 and strstart are already inserted. | ||
1769 | */ | ||
1770 | niro | 816 | G1.lookahead -= G1.prev_length - 1; |
1771 | G1.prev_length -= 2; | ||
1772 | niro | 532 | do { |
1773 | niro | 816 | G1.strstart++; |
1774 | INSERT_STRING(G1.strstart, hash_head); | ||
1775 | niro | 532 | /* strstart never exceeds WSIZE-MAX_MATCH, so there are |
1776 | * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH | ||
1777 | * these bytes are garbage, but it does not matter since the | ||
1778 | * next lookahead bytes will always be emitted as literals. | ||
1779 | */ | ||
1780 | niro | 816 | } while (--G1.prev_length != 0); |
1781 | niro | 532 | match_available = 0; |
1782 | match_length = MIN_MATCH - 1; | ||
1783 | niro | 816 | G1.strstart++; |
1784 | niro | 532 | if (flush) { |
1785 | FLUSH_BLOCK(0); | ||
1786 | niro | 816 | G1.block_start = G1.strstart; |
1787 | niro | 532 | } |
1788 | } else if (match_available) { | ||
1789 | /* If there was no match at the previous position, output a | ||
1790 | * single literal. If there was a match but the current match | ||
1791 | * is longer, truncate the previous match to a single literal. | ||
1792 | */ | ||
1793 | niro | 816 | Tracevv((stderr, "%c", G1.window[G1.strstart - 1])); |
1794 | if (ct_tally(0, G1.window[G1.strstart - 1])) { | ||
1795 | niro | 532 | FLUSH_BLOCK(0); |
1796 | niro | 816 | G1.block_start = G1.strstart; |
1797 | niro | 532 | } |
1798 | niro | 816 | G1.strstart++; |
1799 | G1.lookahead--; | ||
1800 | niro | 532 | } else { |
1801 | /* There is no previous match to compare with, wait for | ||
1802 | * the next step to decide. | ||
1803 | */ | ||
1804 | match_available = 1; | ||
1805 | niro | 816 | G1.strstart++; |
1806 | G1.lookahead--; | ||
1807 | niro | 532 | } |
1808 | niro | 816 | Assert(G1.strstart <= G1.isize && lookahead <= G1.isize, "a bit too far"); |
1809 | niro | 532 | |
1810 | /* Make sure that we always have enough lookahead, except | ||
1811 | * at the end of the input file. We need MAX_MATCH bytes | ||
1812 | * for the next match, plus MIN_MATCH bytes to insert the | ||
1813 | * string following the next match. | ||
1814 | */ | ||
1815 | niro | 816 | while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile) |
1816 | niro | 532 | fill_window(); |
1817 | } | ||
1818 | if (match_available) | ||
1819 | niro | 816 | ct_tally(0, G1.window[G1.strstart - 1]); |
1820 | niro | 532 | |
1821 | return FLUSH_BLOCK(1); /* eof */ | ||
1822 | } | ||
1823 | |||
1824 | |||
1825 | /* =========================================================================== | ||
1826 | * Initialize the bit string routines. | ||
1827 | */ | ||
1828 | niro | 816 | static void bi_init(void) |
1829 | niro | 532 | { |
1830 | niro | 816 | G1.bi_buf = 0; |
1831 | G1.bi_valid = 0; | ||
1832 | niro | 532 | #ifdef DEBUG |
1833 | niro | 816 | G1.bits_sent = 0L; |
1834 | niro | 532 | #endif |
1835 | } | ||
1836 | |||
1837 | |||
/* ===========================================================================
 * Initialize the "longest match" routines for a new file
 *
 * flagsp: in/out pointer to the gzip header's "extra flags" byte;
 *         this routine ORs in the compression-level indicator.
 * Fills the sliding window with the first chunk of input and primes the
 * rolling hash; sets G1.eofile if the input is empty or unreadable.
 */
static void lm_init(ush * flagsp)
{
	unsigned j;

	/* Initialize the hash table. */
	memset(head, 0, HASH_SIZE * sizeof(*head));
	/* prev will be initialized on the fly */

	/* speed options for the general purpose bit flag */
	*flagsp |= 2;		/* FAST 4, SLOW 2 */
	/* ??? reduce max_chain_length for binary files */

	G1.strstart = 0;
	G1.block_start = 0L;

	/* on 16-bit ints read only WSIZE to avoid unsigned overflow */
	G1.lookahead = file_read(G1.window,
			sizeof(int) <= 2 ? (unsigned) WSIZE : 2 * WSIZE);

	if (G1.lookahead == 0 || G1.lookahead == (unsigned) -1) {
		/* empty input or read error: behave as if at EOF */
		G1.eofile = 1;
		G1.lookahead = 0;
		return;
	}
	G1.eofile = 0;
	/* Make sure that we always have enough lookahead. This is important
	 * if input comes from a device such as a tty.
	 */
	while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
		fill_window();

	/* prime the rolling hash with the first MIN_MATCH-1 bytes */
	G1.ins_h = 0;
	for (j = 0; j < MIN_MATCH - 1; j++)
		UPDATE_HASH(G1.ins_h, G1.window[j]);
	/* If lookahead < MIN_MATCH, ins_h is garbage, but this is
	 * not important since only literal bytes will be emitted.
	 */
}
1878 | |||
1879 | |||
/* ===========================================================================
 * Allocate the match buffer, initialize the various tables and save the
 * location of the internal file attribute (ascii/binary) and method
 * (DEFLATE/STORE).
 * One callsite in zip()
 *
 * Builds the length->code and dist->code lookup tables, the fixed
 * (static) Huffman trees mandated by DEFLATE, and resets the first block.
 */
static void ct_init(void)
{
	int n;			/* iterates over tree elements */
	int length;		/* length value */
	int code;		/* code value */
	int dist;		/* distance index */

	G2.compressed_len = 0L;

#ifdef NOT_NEEDED
	if (G2.static_dtree[0].Len != 0)
		return;		/* ct_init already called */
#endif

	/* Initialize the mapping length (0..255) -> length code (0..28) */
	length = 0;
	for (code = 0; code < LENGTH_CODES - 1; code++) {
		G2.base_length[code] = length;
		for (n = 0; n < (1 << extra_lbits[code]); n++) {
			G2.length_code[length++] = code;
		}
	}
	Assert(length == 256, "ct_init: length != 256");
	/* Note that the length 255 (match length 258) can be represented
	 * in two different ways: code 284 + 5 bits or code 285, so we
	 * overwrite length_code[255] to use the best encoding:
	 */
	G2.length_code[length - 1] = code;

	/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
	dist = 0;
	for (code = 0; code < 16; code++) {	/* short distances, direct table */
		G2.base_dist[code] = dist;
		for (n = 0; n < (1 << extra_dbits[code]); n++) {
			G2.dist_code[dist++] = code;
		}
	}
	Assert(dist == 256, "ct_init: dist != 256");
	dist >>= 7;		/* from now on, all distances are divided by 128 */
	for (; code < D_CODES; code++) {	/* long distances, dist/128 table */
		G2.base_dist[code] = dist << 7;
		for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
			G2.dist_code[256 + dist++] = code;
		}
	}
	Assert(dist == 256, "ct_init: 256+dist != 512");

	/* Construct the codes of the static literal tree */
	/* already zeroed - it's in bss
	for (n = 0; n <= MAX_BITS; n++)
		G2.bl_count[n] = 0; */

	/* fixed literal tree code lengths per the DEFLATE specification */
	n = 0;
	while (n <= 143) {
		G2.static_ltree[n++].Len = 8;
		G2.bl_count[8]++;
	}
	while (n <= 255) {
		G2.static_ltree[n++].Len = 9;
		G2.bl_count[9]++;
	}
	while (n <= 279) {
		G2.static_ltree[n++].Len = 7;
		G2.bl_count[7]++;
	}
	while (n <= 287) {
		G2.static_ltree[n++].Len = 8;
		G2.bl_count[8]++;
	}
	/* Codes 286 and 287 do not exist, but we must include them in the
	 * tree construction to get a canonical Huffman tree (longest code
	 * all ones)
	 */
	gen_codes((ct_data *) G2.static_ltree, L_CODES + 1);

	/* The static distance tree is trivial: */
	for (n = 0; n < D_CODES; n++) {
		G2.static_dtree[n].Len = 5;
		G2.static_dtree[n].Code = bi_reverse(n, 5);
	}

	/* Initialize the first block of the first file: */
	init_block();
}
1970 | |||
1971 | |||
/* ===========================================================================
 * Deflate in to out.
 * IN assertions: the input and output buffers are cleared.
 *
 * time_stamp: mtime recorded in the gzip header (MTIME field).
 * Emits the 10-byte gzip header, the deflated stream, then the CRC32 and
 * uncompressed size trailer, and flushes the output buffer.
 */
static void zip(ulg time_stamp)
{
	ush deflate_flags = 0;	/* pkzip -es, -en or -ex equivalent */

	G1.outcnt = 0;

	/* Write the header to the gzip file. See algorithm.doc for the format */
	/* magic header for gzip files: 1F 8B */
	/* compression method: 8 (DEFLATED) */
	/* general flags: 0 */
	put_32bit(0x00088b1f);	/* the three fields above, little-endian */
	put_32bit(time_stamp);

	/* Write deflated file to zip file */
	G1.crc = ~0;	/* CRC32 starts all-ones, finalized by inversion below */

	bi_init();
	ct_init();
	lm_init(&deflate_flags);	/* may set bits in deflate_flags */

	put_8bit(deflate_flags);	/* extra flags */
	put_8bit(3);	/* OS identifier = 3 (Unix) */

	deflate();

	/* Write the crc and uncompressed size */
	put_32bit(~G1.crc);
	put_32bit(G1.isize);

	flush_outbuf();
}
2008 | |||
2009 | |||
/* ======================================================================== */
/* Build the output file name for bbunpack(): "<filename>.gz" (allocated). */
static
char* make_new_name_gzip(char *filename)
{
	char *gzname = xasprintf("%s.gz", filename);
	return gzname;
}
2016 | |||
2017 | niro | 816 | static |
2018 | USE_DESKTOP(long long) int pack_gzip(unpack_info_t *info UNUSED_PARAM) | ||
2019 | niro | 532 | { |
2020 | niro | 816 | struct stat s; |
2021 | niro | 532 | |
2022 | clear_bufs(); | ||
2023 | niro | 816 | s.st_ctime = 0; |
2024 | fstat(STDIN_FILENO, &s); | ||
2025 | zip(s.st_ctime); | ||
2026 | return 0; | ||
2027 | } | ||
2028 | niro | 532 | |
2029 | niro | 816 | /* |
2030 | * Linux kernel build uses gzip -d -n. We accept and ignore it. | ||
2031 | * Man page says: | ||
2032 | * -n --no-name | ||
2033 | * gzip: do not save the original file name and time stamp. | ||
2034 | * (The original name is always saved if the name had to be truncated.) | ||
2035 | * gunzip: do not restore the original file name/time even if present | ||
2036 | * (remove only the gzip suffix from the compressed file name). | ||
2037 | * This option is the default when decompressing. | ||
2038 | * -N --name | ||
2039 | * gzip: always save the original file name and time stamp (this is the default) | ||
2040 | * gunzip: restore the original file name and time stamp if present. | ||
2041 | */ | ||
2042 | niro | 532 | |
2043 | niro | 816 | int gzip_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE; |
2044 | #if ENABLE_GUNZIP | ||
2045 | int gzip_main(int argc, char **argv) | ||
2046 | niro | 532 | #else |
2047 | niro | 816 | int gzip_main(int argc UNUSED_PARAM, char **argv) |
2048 | niro | 532 | #endif |
2049 | niro | 816 | { |
2050 | unsigned opt; | ||
2051 | niro | 532 | |
2052 | niro | 816 | /* Must match bbunzip's constants OPT_STDOUT, OPT_FORCE! */ |
2053 | opt = getopt32(argv, "cfv" USE_GUNZIP("dt") "q123456789n"); | ||
2054 | #if ENABLE_GUNZIP /* gunzip_main may not be visible... */ | ||
2055 | if (opt & 0x18) // -d and/or -t | ||
2056 | return gunzip_main(argc, argv); | ||
2057 | #endif | ||
2058 | option_mask32 &= 0x7; /* ignore -q, -0..9 */ | ||
2059 | //if (opt & 0x1) // -c | ||
2060 | //if (opt & 0x2) // -f | ||
2061 | //if (opt & 0x4) // -v | ||
2062 | argv += optind; | ||
2063 | niro | 532 | |
2064 | niro | 816 | SET_PTR_TO_GLOBALS(xzalloc(sizeof(struct globals) + sizeof(struct globals2)) |
2065 | + sizeof(struct globals)); | ||
2066 | barrier(); | ||
2067 | G2.l_desc.dyn_tree = G2.dyn_ltree; | ||
2068 | G2.l_desc.static_tree = G2.static_ltree; | ||
2069 | G2.l_desc.extra_bits = extra_lbits; | ||
2070 | G2.l_desc.extra_base = LITERALS + 1; | ||
2071 | G2.l_desc.elems = L_CODES; | ||
2072 | G2.l_desc.max_length = MAX_BITS; | ||
2073 | //G2.l_desc.max_code = 0; | ||
2074 | niro | 532 | |
2075 | niro | 816 | G2.d_desc.dyn_tree = G2.dyn_dtree; |
2076 | G2.d_desc.static_tree = G2.static_dtree; | ||
2077 | G2.d_desc.extra_bits = extra_dbits; | ||
2078 | //G2.d_desc.extra_base = 0; | ||
2079 | G2.d_desc.elems = D_CODES; | ||
2080 | G2.d_desc.max_length = MAX_BITS; | ||
2081 | //G2.d_desc.max_code = 0; | ||
2082 | niro | 532 | |
2083 | niro | 816 | G2.bl_desc.dyn_tree = G2.bl_tree; |
2084 | //G2.bl_desc.static_tree = NULL; | ||
2085 | G2.bl_desc.extra_bits = extra_blbits, | ||
2086 | //G2.bl_desc.extra_base = 0; | ||
2087 | G2.bl_desc.elems = BL_CODES; | ||
2088 | G2.bl_desc.max_length = MAX_BL_BITS; | ||
2089 | //G2.bl_desc.max_code = 0; | ||
2090 | niro | 532 | |
2091 | niro | 816 | /* Allocate all global buffers (for DYN_ALLOC option) */ |
2092 | ALLOC(uch, G1.l_buf, INBUFSIZ); | ||
2093 | ALLOC(uch, G1.outbuf, OUTBUFSIZ); | ||
2094 | ALLOC(ush, G1.d_buf, DIST_BUFSIZE); | ||
2095 | ALLOC(uch, G1.window, 2L * WSIZE); | ||
2096 | ALLOC(ush, G1.prev, 1L << BITS); | ||
2097 | niro | 532 | |
2098 | niro | 816 | /* Initialise the CRC32 table */ |
2099 | G1.crc_32_tab = crc32_filltable(NULL, 0); | ||
2100 | niro | 532 | |
2101 | niro | 816 | return bbunpack(argv, make_new_name_gzip, pack_gzip); |
2102 | niro | 532 | } |