lemon-project-template-glpk
comparison deps/glpk/src/zlib/trees.c @ 11:4fc6ad2fb8a6
Test GLPK in src/main.cc
author: Alpar Juttner <alpar@cs.elte.hu>
date:   Sun, 06 Nov 2011 21:43:29 +0100
1 /* trees.c -- output deflated data using Huffman coding | |
2 * Copyright (C) 1995-2010 Jean-loup Gailly | |
3 * detect_data_type() function provided freely by Cosmin Truta, 2006 | |
4 * For conditions of distribution and use, see copyright notice in zlib.h | |
5 */ | |
6 | |
7 /* | |
8 * ALGORITHM | |
9 * | |
10 * The "deflation" process uses several Huffman trees. The more | |
11 * common source values are represented by shorter bit sequences. | |
12 * | |
13 * Each code tree is stored in a compressed form which is itself | |
14 * a Huffman encoding of the lengths of all the code strings (in | |
15 * ascending order by source values). The actual code strings are | |
16 * reconstructed from the lengths in the inflate process, as described | |
17 * in the deflate specification. | |
18 * | |
19 * REFERENCES | |
20 * | |
21 * Deutsch, L.P.,"'Deflate' Compressed Data Format Specification". | |
22 * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc | |
23 * | |
24 * Storer, James A. | |
25 * Data Compression: Methods and Theory, pp. 49-50. | |
26 * Computer Science Press, 1988. ISBN 0-7167-8156-5. | |
27 * | |
28 * Sedgewick, R. | |
29 * Algorithms, p290. | |
30 * Addison-Wesley, 1983. ISBN 0-201-06672-6. | |
31 */ | |
32 | |
33 /* @(#) $Id$ */ | |
34 | |
35 /* #define GEN_TREES_H */ | |
36 | |
37 #include "deflate.h" | |
38 | |
39 #ifdef DEBUG | |
40 # include <ctype.h> | |
41 #endif | |
42 | |
43 /* =========================================================================== | |
44 * Constants | |
45 */ | |
46 | |
47 #define MAX_BL_BITS 7 | |
48 /* Bit length codes must not exceed MAX_BL_BITS bits */ | |
49 | |
50 #define END_BLOCK 256 | |
51 /* end of block literal code */ | |
52 | |
53 #define REP_3_6 16 | |
54 /* repeat previous bit length 3-6 times (2 bits of repeat count) */ | |
55 | |
56 #define REPZ_3_10 17 | |
57 /* repeat a zero length 3-10 times (3 bits of repeat count) */ | |
58 | |
59 #define REPZ_11_138 18 | |
60 /* repeat a zero length 11-138 times (7 bits of repeat count) */ | |
61 | |
62 local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */ | |
63 = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0}; | |
64 | |
65 local const int extra_dbits[D_CODES] /* extra bits for each distance code */ | |
66 = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13}; | |
67 | |
68 local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */ | |
69 = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7}; | |
70 | |
71 local const uch bl_order[BL_CODES] | |
72 = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15}; | |
73 /* The lengths of the bit length codes are sent in order of decreasing | |
74 * probability, to avoid transmitting the lengths for unused bit length codes. | |
75 */ | |
76 | |
77 #define Buf_size (8 * 2*sizeof(char)) | |
78 /* Number of bits used within bi_buf. (bi_buf might be implemented on | |
79 * more than 16 bits on some systems.) | |
80 */ | |
81 | |
82 /* =========================================================================== | |
83 * Local data. These are initialized only once. | |
84 */ | |
85 | |
86 #define DIST_CODE_LEN 512 /* see definition of array dist_code below */ | |
87 | |
88 #if defined(GEN_TREES_H) || !defined(STDC) | |
89 /* non ANSI compilers may not accept trees.h */ | |
90 | |
91 local ct_data static_ltree[L_CODES+2]; | |
92 /* The static literal tree. Since the bit lengths are imposed, there is no | |
93 * need for the L_CODES extra codes used during heap construction. However, | |
94 * the codes 286 and 287 are needed to build a canonical tree (see _tr_init | |
95 * below). | |
96 */ | |
97 | |
98 local ct_data static_dtree[D_CODES]; | |
99 /* The static distance tree. (Actually a trivial tree since all codes use | |
100 * 5 bits.) | |
101 */ | |
102 | |
103 uch _dist_code[DIST_CODE_LEN]; | |
104 /* Distance codes. The first 256 values correspond to the distances | |
105 * 1 .. 256, the last 256 values correspond to the top 8 bits of | |
106 * the 15 bit distances. | |
107 */ | |
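/* Illustration only, not part of zlib: a minimal sketch of how a match
 * distance is turned into one of the D_CODES distance codes.  The real
 * lookup is the d_code() macro in deflate.h; demo_d_code below merely
 * restates its logic (the demo_* name is made up for this sketch), using
 * the convention that `dist` is the match distance minus one.
 */
#if 0
static int demo_d_code(unsigned dist, const unsigned char *dist_code)
{
    /* short distances index the table directly, long ones by their top bits */
    return dist < 256 ? dist_code[dist] : dist_code[256 + (dist >> 7)];
}
/* e.g. distance 1    -> demo_d_code(0, _dist_code)   == 0
 *      distance 4    -> demo_d_code(3, _dist_code)   == 3
 *      distance 1000 -> demo_d_code(999, _dist_code) == _dist_code[256 + 7]
 */
#endif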
108 | |
109 uch _length_code[MAX_MATCH-MIN_MATCH+1]; | |
110 /* length code for each normalized match length (0 == MIN_MATCH) */ | |
111 | |
112 local int base_length[LENGTH_CODES]; | |
113 /* First normalized length for each code (0 = MIN_MATCH) */ | |
114 | |
115 local int base_dist[D_CODES]; | |
116 /* First normalized distance for each code (0 = distance of 1) */ | |
117 | |
118 #else | |
119 # include "trees.h" | |
120 #endif /* GEN_TREES_H */ | |
121 | |
122 struct static_tree_desc_s { | |
123 const ct_data *static_tree; /* static tree or NULL */ | |
124 const intf *extra_bits; /* extra bits for each code or NULL */ | |
125 int extra_base; /* base index for extra_bits */ | |
126 int elems; /* max number of elements in the tree */ | |
127 int max_length; /* max bit length for the codes */ | |
128 }; | |
129 | |
130 local static_tree_desc static_l_desc = | |
131 {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS}; | |
132 | |
133 local static_tree_desc static_d_desc = | |
134 {static_dtree, extra_dbits, 0, D_CODES, MAX_BITS}; | |
135 | |
136 local static_tree_desc static_bl_desc = | |
137 {(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS}; | |
138 | |
139 /* =========================================================================== | |
140 * Local (static) routines in this file. | |
141 */ | |
142 | |
143 local void tr_static_init OF((void)); | |
144 local void init_block OF((deflate_state *s)); | |
145 local void pqdownheap OF((deflate_state *s, ct_data *tree, int k)); | |
146 local void gen_bitlen OF((deflate_state *s, tree_desc *desc)); | |
147 local void gen_codes OF((ct_data *tree, int max_code, ushf *bl_count)); | |
148 local void build_tree OF((deflate_state *s, tree_desc *desc)); | |
149 local void scan_tree OF((deflate_state *s, ct_data *tree, int max_code)); | |
150 local void send_tree OF((deflate_state *s, ct_data *tree, int max_code)); | |
151 local int build_bl_tree OF((deflate_state *s)); | |
152 local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes, | |
153 int blcodes)); | |
154 local void compress_block OF((deflate_state *s, ct_data *ltree, | |
155 ct_data *dtree)); | |
156 local int detect_data_type OF((deflate_state *s)); | |
157 local unsigned bi_reverse OF((unsigned value, int length)); | |
158 local void bi_windup OF((deflate_state *s)); | |
159 local void bi_flush OF((deflate_state *s)); | |
160 local void copy_block OF((deflate_state *s, charf *buf, unsigned len, | |
161 int header)); | |
162 | |
163 #ifdef GEN_TREES_H | |
164 local void gen_trees_header OF((void)); | |
165 #endif | |
166 | |
167 #ifndef DEBUG | |
168 # define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len) | |
169 /* Send a code of the given tree. c and tree must not have side effects */ | |
170 | |
171 #else /* DEBUG */ | |
172 # define send_code(s, c, tree) \ | |
173 { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \ | |
174 send_bits(s, tree[c].Code, tree[c].Len); } | |
175 #endif | |
176 | |
177 /* =========================================================================== | |
178 * Output a short LSB first on the stream. | |
179 * IN assertion: there is enough room in pendingBuf. | |
180 */ | |
181 #define put_short(s, w) { \ | |
182 put_byte(s, (uch)((w) & 0xff)); \ | |
183 put_byte(s, (uch)((ush)(w) >> 8)); \ | |
184 } | |
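/* Illustration only, not part of zlib: put_short emits the low byte before
 * the high byte, so the 16-bit value 0x1234 reaches the output stream as
 * the bytes 0x34 0x12.  The standalone snippet below (hypothetical, used
 * nowhere in this file) just prints the two bytes in that order.
 */
#if 0
#include <stdio.h>
int main(void)
{
    unsigned short w = 0x1234;
    printf("%02x %02x\n", w & 0xff, (w >> 8) & 0xff);   /* prints "34 12" */
    return 0;
}
#endif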
185 | |
186 /* =========================================================================== | |
187 * Send a value on a given number of bits. | |
188 * IN assertion: length <= 16 and value fits in length bits. | |
189 */ | |
190 #ifdef DEBUG | |
191 local void send_bits OF((deflate_state *s, int value, int length)); | |
192 | |
193 local void send_bits(s, value, length) | |
194 deflate_state *s; | |
195 int value; /* value to send */ | |
196 int length; /* number of bits */ | |
197 { | |
198 Tracevv((stderr," l %2d v %4x ", length, value)); | |
199 Assert(length > 0 && length <= 15, "invalid length"); | |
200 s->bits_sent += (ulg)length; | |
201 | |
202 /* If not enough room in bi_buf, use (valid) bits from bi_buf and | |
203 * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid)) | |
204 * unused bits in value. | |
205 */ | |
206 if (s->bi_valid > (int)Buf_size - length) { | |
207 s->bi_buf |= (ush)value << s->bi_valid; | |
208 put_short(s, s->bi_buf); | |
209 s->bi_buf = (ush)value >> (Buf_size - s->bi_valid); | |
210 s->bi_valid += length - Buf_size; | |
211 } else { | |
212 s->bi_buf |= (ush)value << s->bi_valid; | |
213 s->bi_valid += length; | |
214 } | |
215 } | |
216 #else /* !DEBUG */ | |
217 | |
218 #define send_bits(s, value, length) \ | |
219 { int len = length;\ | |
220 if (s->bi_valid > (int)Buf_size - len) {\ | |
221 int val = value;\ | |
222 s->bi_buf |= (ush)val << s->bi_valid;\ | |
223 put_short(s, s->bi_buf);\ | |
224 s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\ | |
225 s->bi_valid += len - Buf_size;\ | |
226 } else {\ | |
227 s->bi_buf |= (ush)(value) << s->bi_valid;\ | |
228 s->bi_valid += len;\ | |
229 }\ | |
230 } | |
231 #endif /* DEBUG */ | |
232 | |
233 | |
234 /* the arguments must not have side effects */ | |
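/* Illustration only, not part of zlib: a standalone model of the 16-bit bit
 * buffer that send_bits maintains.  Codes are ORed into the accumulator at
 * position bi_valid; when the accumulator fills, 16 bits are flushed and the
 * leftover high bits of the value are carried over.  All demo_* names are
 * made up for this sketch and Buf_size is hard-coded as 16.
 */
#if 0
#include <stdio.h>

static unsigned short demo_bi_buf = 0;
static int demo_bi_valid = 0;

static void demo_send_bits(int value, int length)
{
    if (demo_bi_valid > 16 - length) {
        demo_bi_buf |= (unsigned short)((unsigned)value << demo_bi_valid);
        printf("flush 0x%04x\n", demo_bi_buf);       /* would go to output */
        demo_bi_buf = (unsigned short)((unsigned)value >> (16 - demo_bi_valid));
        demo_bi_valid += length - 16;
    } else {
        demo_bi_buf |= (unsigned short)((unsigned)value << demo_bi_valid);
        demo_bi_valid += length;
    }
}

int main(void)
{
    demo_send_bits(0x5, 3);     /* 3 bits pending                          */
    demo_send_bits(0x1ff, 9);   /* 12 bits pending                         */
    demo_send_bits(0x7f, 7);    /* overflows: 16 bits flushed, 3 carried   */
    printf("pending %d bits: 0x%x\n", demo_bi_valid, demo_bi_buf);
    return 0;
}
#endif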
235 | |
236 /* =========================================================================== | |
237 * Initialize the various 'constant' tables. | |
238 */ | |
239 local void tr_static_init() | |
240 { | |
241 #if defined(GEN_TREES_H) || !defined(STDC) | |
242 static int static_init_done = 0; | |
243 int n; /* iterates over tree elements */ | |
244 int bits; /* bit counter */ | |
245 int length; /* length value */ | |
246 int code; /* code value */ | |
247 int dist; /* distance index */ | |
248 ush bl_count[MAX_BITS+1]; | |
249 /* number of codes at each bit length for an optimal tree */ | |
250 | |
251 if (static_init_done) return; | |
252 | |
253 /* For some embedded targets, global variables are not initialized: */ | |
254 #ifdef NO_INIT_GLOBAL_POINTERS | |
255 static_l_desc.static_tree = static_ltree; | |
256 static_l_desc.extra_bits = extra_lbits; | |
257 static_d_desc.static_tree = static_dtree; | |
258 static_d_desc.extra_bits = extra_dbits; | |
259 static_bl_desc.extra_bits = extra_blbits; | |
260 #endif | |
261 | |
262 /* Initialize the mapping length (0..255) -> length code (0..28) */ | |
263 length = 0; | |
264 for (code = 0; code < LENGTH_CODES-1; code++) { | |
265 base_length[code] = length; | |
266 for (n = 0; n < (1<<extra_lbits[code]); n++) { | |
267 _length_code[length++] = (uch)code; | |
268 } | |
269 } | |
270 Assert (length == 256, "tr_static_init: length != 256"); | |
271 /* Note that the length 255 (match length 258) can be represented | |
272 * in two different ways: code 284 + 5 bits or code 285, so we | |
273 * overwrite length_code[255] to use the best encoding: | |
274 */ | |
275 _length_code[length-1] = (uch)code; | |
276 | |
277 /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ | |
278 dist = 0; | |
279 for (code = 0 ; code < 16; code++) { | |
280 base_dist[code] = dist; | |
281 for (n = 0; n < (1<<extra_dbits[code]); n++) { | |
282 _dist_code[dist++] = (uch)code; | |
283 } | |
284 } | |
285 Assert (dist == 256, "tr_static_init: dist != 256"); | |
286 dist >>= 7; /* from now on, all distances are divided by 128 */ | |
287 for ( ; code < D_CODES; code++) { | |
288 base_dist[code] = dist << 7; | |
289 for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) { | |
290 _dist_code[256 + dist++] = (uch)code; | |
291 } | |
292 } | |
293 Assert (dist == 256, "tr_static_init: 256+dist != 512"); | |
294 | |
295 /* Construct the codes of the static literal tree */ | |
296 for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0; | |
297 n = 0; | |
298 while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++; | |
299 while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++; | |
300 while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++; | |
301 while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++; | |
302 /* Codes 286 and 287 do not exist, but we must include them in the | |
303 * tree construction to get a canonical Huffman tree (longest code | |
304 * all ones) | |
305 */ | |
306 gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count); | |
307 | |
308 /* The static distance tree is trivial: */ | |
309 for (n = 0; n < D_CODES; n++) { | |
310 static_dtree[n].Len = 5; | |
311 static_dtree[n].Code = bi_reverse((unsigned)n, 5); | |
312 } | |
313 static_init_done = 1; | |
314 | |
315 # ifdef GEN_TREES_H | |
316 gen_trees_header(); | |
317 # endif | |
318 #endif /* defined(GEN_TREES_H) || !defined(STDC) */ | |
319 } | |
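/* Illustration only, not part of zlib: a standalone sketch of the
 * length -> length-code mapping that tr_static_init precomputes above.
 * The demo_* names are made up here and the table literal simply repeats
 * extra_lbits.  A match length L (3..258) maps to length code
 * demo_length_code[L-3] + 257, and (L-3) - demo_base_length[code] is sent
 * in extra_lbits[code] extra bits.
 */
#if 0
#include <stdio.h>

int main(void)
{
    static const int demo_extra_lbits[29] =
        {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
    unsigned char demo_length_code[256];
    int demo_base_length[29];
    int code, n, length = 0;

    for (code = 0; code < 28; code++) {
        demo_base_length[code] = length;
        for (n = 0; n < (1 << demo_extra_lbits[code]); n++)
            demo_length_code[length++] = (unsigned char)code;
    }
    demo_length_code[255] = 28;      /* match length 258 gets its own code */
    demo_base_length[28] = 255;      /* code 285 represents length 258 exactly */

    for (n = 3; n <= 258; n += 85) { /* sample a few match lengths */
        int lc = n - 3;              /* normalized length */
        int c = demo_length_code[lc];
        printf("match length %3d -> code %3d, %d extra bit(s), extra value %d\n",
               n, c + 257, demo_extra_lbits[c], lc - demo_base_length[c]);
    }
    return 0;
}
#endif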
320 | |
321 /* =========================================================================== | |
322 * Generate the file trees.h describing the static trees. | |
323 */ | |
324 #ifdef GEN_TREES_H | |
325 # ifndef DEBUG | |
326 # include <stdio.h> | |
327 # endif | |
328 | |
329 # define SEPARATOR(i, last, width) \ | |
330 ((i) == (last)? "\n};\n\n" : \ | |
331 ((i) % (width) == (width)-1 ? ",\n" : ", ")) | |
332 | |
333 void gen_trees_header() | |
334 { | |
335 FILE *header = fopen("trees.h", "w"); | |
336 int i; | |
337 | |
338 Assert (header != NULL, "Can't open trees.h"); | |
339 fprintf(header, | |
340 "/* header created automatically with -DGEN_TREES_H */\n\n"); | |
341 | |
342 fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n"); | |
343 for (i = 0; i < L_CODES+2; i++) { | |
344 fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code, | |
345 static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5)); | |
346 } | |
347 | |
348 fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n"); | |
349 for (i = 0; i < D_CODES; i++) { | |
350 fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code, | |
351 static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5)); | |
352 } | |
353 | |
354 fprintf(header, "const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {\n"); | |
355 for (i = 0; i < DIST_CODE_LEN; i++) { | |
356 fprintf(header, "%2u%s", _dist_code[i], | |
357 SEPARATOR(i, DIST_CODE_LEN-1, 20)); | |
358 } | |
359 | |
360 fprintf(header, | |
361 "const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {\n"); | |
362 for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) { | |
363 fprintf(header, "%2u%s", _length_code[i], | |
364 SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20)); | |
365 } | |
366 | |
367 fprintf(header, "local const int base_length[LENGTH_CODES] = {\n"); | |
368 for (i = 0; i < LENGTH_CODES; i++) { | |
369 fprintf(header, "%1u%s", base_length[i], | |
370 SEPARATOR(i, LENGTH_CODES-1, 20)); | |
371 } | |
372 | |
373 fprintf(header, "local const int base_dist[D_CODES] = {\n"); | |
374 for (i = 0; i < D_CODES; i++) { | |
375 fprintf(header, "%5u%s", base_dist[i], | |
376 SEPARATOR(i, D_CODES-1, 10)); | |
377 } | |
378 | |
379 fclose(header); | |
380 } | |
381 #endif /* GEN_TREES_H */ | |
382 | |
383 /* =========================================================================== | |
384 * Initialize the tree data structures for a new zlib stream. | |
385 */ | |
386 void ZLIB_INTERNAL _tr_init(s) | |
387 deflate_state *s; | |
388 { | |
389 tr_static_init(); | |
390 | |
391 s->l_desc.dyn_tree = s->dyn_ltree; | |
392 s->l_desc.stat_desc = &static_l_desc; | |
393 | |
394 s->d_desc.dyn_tree = s->dyn_dtree; | |
395 s->d_desc.stat_desc = &static_d_desc; | |
396 | |
397 s->bl_desc.dyn_tree = s->bl_tree; | |
398 s->bl_desc.stat_desc = &static_bl_desc; | |
399 | |
400 s->bi_buf = 0; | |
401 s->bi_valid = 0; | |
402 s->last_eob_len = 8; /* enough lookahead for inflate */ | |
403 #ifdef DEBUG | |
404 s->compressed_len = 0L; | |
405 s->bits_sent = 0L; | |
406 #endif | |
407 | |
408 /* Initialize the first block of the first file: */ | |
409 init_block(s); | |
410 } | |
411 | |
412 /* =========================================================================== | |
413 * Initialize a new block. | |
414 */ | |
415 local void init_block(s) | |
416 deflate_state *s; | |
417 { | |
418 int n; /* iterates over tree elements */ | |
419 | |
420 /* Initialize the trees. */ | |
421 for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0; | |
422 for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0; | |
423 for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0; | |
424 | |
425 s->dyn_ltree[END_BLOCK].Freq = 1; | |
426 s->opt_len = s->static_len = 0L; | |
427 s->last_lit = s->matches = 0; | |
428 } | |
429 | |
430 #define SMALLEST 1 | |
431 /* Index within the heap array of least frequent node in the Huffman tree */ | |
432 | |
433 | |
434 /* =========================================================================== | |
435 * Remove the smallest element from the heap and recreate the heap with | |
436 * one less element. Updates heap and heap_len. | |
437 */ | |
438 #define pqremove(s, tree, top) \ | |
439 {\ | |
440 top = s->heap[SMALLEST]; \ | |
441 s->heap[SMALLEST] = s->heap[s->heap_len--]; \ | |
442 pqdownheap(s, tree, SMALLEST); \ | |
443 } | |
444 | |
445 /* =========================================================================== | |
446 * Compares two subtrees, using the tree depth as tie breaker when | |
447 * the subtrees have equal frequency. This minimizes the worst case length. | |
448 */ | |
449 #define smaller(tree, n, m, depth) \ | |
450 (tree[n].Freq < tree[m].Freq || \ | |
451 (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m])) | |
452 | |
453 /* =========================================================================== | |
454 * Restore the heap property by moving down the tree starting at node k, | |
455 * exchanging a node with the smallest of its two sons if necessary, stopping | |
456 * when the heap property is re-established (each father smaller than its | |
457 * two sons). | |
458 */ | |
459 local void pqdownheap(s, tree, k) | |
460 deflate_state *s; | |
461 ct_data *tree; /* the tree to restore */ | |
462 int k; /* node to move down */ | |
463 { | |
464 int v = s->heap[k]; | |
465 int j = k << 1; /* left son of k */ | |
466 while (j <= s->heap_len) { | |
467 /* Set j to the smallest of the two sons: */ | |
468 if (j < s->heap_len && | |
469 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) { | |
470 j++; | |
471 } | |
472 /* Exit if v is smaller than both sons */ | |
473 if (smaller(tree, v, s->heap[j], s->depth)) break; | |
474 | |
475 /* Exchange v with the smallest son */ | |
476 s->heap[k] = s->heap[j]; k = j; | |
477 | |
478 /* And continue down the tree, setting j to the left son of k */ | |
479 j <<= 1; | |
480 } | |
481 s->heap[k] = v; | |
482 } | |
483 | |
484 /* =========================================================================== | |
485 * Compute the optimal bit lengths for a tree and update the total bit length | |
486 * for the current block. | |
487 * IN assertion: the fields freq and dad are set, heap[heap_max] and | |
488 * above are the tree nodes sorted by increasing frequency. | |
489 * OUT assertions: the field len is set to the optimal bit length, the | |
490 * array bl_count contains the frequencies for each bit length. | |
491 * The length opt_len is updated; static_len is also updated if stree is | |
492 * not null. | |
493 */ | |
494 local void gen_bitlen(s, desc) | |
495 deflate_state *s; | |
496 tree_desc *desc; /* the tree descriptor */ | |
497 { | |
498 ct_data *tree = desc->dyn_tree; | |
499 int max_code = desc->max_code; | |
500 const ct_data *stree = desc->stat_desc->static_tree; | |
501 const intf *extra = desc->stat_desc->extra_bits; | |
502 int base = desc->stat_desc->extra_base; | |
503 int max_length = desc->stat_desc->max_length; | |
504 int h; /* heap index */ | |
505 int n, m; /* iterate over the tree elements */ | |
506 int bits; /* bit length */ | |
507 int xbits; /* extra bits */ | |
508 ush f; /* frequency */ | |
509 int overflow = 0; /* number of elements with bit length too large */ | |
510 | |
511 for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0; | |
512 | |
513 /* In a first pass, compute the optimal bit lengths (which may | |
514 * overflow in the case of the bit length tree). | |
515 */ | |
516 tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ | |
517 | |
518 for (h = s->heap_max+1; h < HEAP_SIZE; h++) { | |
519 n = s->heap[h]; | |
520 bits = tree[tree[n].Dad].Len + 1; | |
521 if (bits > max_length) bits = max_length, overflow++; | |
522 tree[n].Len = (ush)bits; | |
523 /* We overwrite tree[n].Dad which is no longer needed */ | |
524 | |
525 if (n > max_code) continue; /* not a leaf node */ | |
526 | |
527 s->bl_count[bits]++; | |
528 xbits = 0; | |
529 if (n >= base) xbits = extra[n-base]; | |
530 f = tree[n].Freq; | |
531 s->opt_len += (ulg)f * (bits + xbits); | |
532 if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits); | |
533 } | |
534 if (overflow == 0) return; | |
535 | |
536 Trace((stderr,"\nbit length overflow\n")); | |
537 /* This happens for example on obj2 and pic of the Calgary corpus */ | |
538 | |
539 /* Find the first bit length which could increase: */ | |
540 do { | |
541 bits = max_length-1; | |
542 while (s->bl_count[bits] == 0) bits--; | |
543 s->bl_count[bits]--; /* move one leaf down the tree */ | |
544 s->bl_count[bits+1] += 2; /* move one overflow item as its brother */ | |
545 s->bl_count[max_length]--; | |
546 /* The brother of the overflow item also moves one step up, | |
547 * but this does not affect bl_count[max_length] | |
548 */ | |
549 overflow -= 2; | |
550 } while (overflow > 0); | |
551 | |
552 /* Now recompute all bit lengths, scanning in increasing frequency. | |
553 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all | |
554 * lengths instead of fixing only the wrong ones. This idea is taken | |
555 * from 'ar' written by Haruhiko Okumura.) | |
556 */ | |
557 for (bits = max_length; bits != 0; bits--) { | |
558 n = s->bl_count[bits]; | |
559 while (n != 0) { | |
560 m = s->heap[--h]; | |
561 if (m > max_code) continue; | |
562 if ((unsigned) tree[m].Len != (unsigned) bits) { | |
563 Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); | |
564 s->opt_len += ((long)bits - (long)tree[m].Len) | |
565 *(long)tree[m].Freq; | |
566 tree[m].Len = (ush)bits; | |
567 } | |
568 n--; | |
569 } | |
570 } | |
571 } | |
572 | |
573 /* =========================================================================== | |
574 * Generate the codes for a given tree and bit counts (which need not be | |
575 * optimal). | |
576 * IN assertion: the array bl_count contains the bit length statistics for | |
577 * the given tree and the field len is set for all tree elements. | |
578 * OUT assertion: the field code is set for all tree elements of non | |
579 * zero code length. | |
580 */ | |
581 local void gen_codes (tree, max_code, bl_count) | |
582 ct_data *tree; /* the tree to decorate */ | |
583 int max_code; /* largest code with non zero frequency */ | |
584 ushf *bl_count; /* number of codes at each bit length */ | |
585 { | |
586 ush next_code[MAX_BITS+1]; /* next code value for each bit length */ | |
587 ush code = 0; /* running code value */ | |
588 int bits; /* bit index */ | |
589 int n; /* code index */ | |
590 | |
591 /* The distribution counts are first used to generate the code values | |
592 * without bit reversal. | |
593 */ | |
594 for (bits = 1; bits <= MAX_BITS; bits++) { | |
595 next_code[bits] = code = (code + bl_count[bits-1]) << 1; | |
596 } | |
597 /* Check that the bit counts in bl_count are consistent. The last code | |
598 * must be all ones. | |
599 */ | |
600 Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1, | |
601 "inconsistent bit counts"); | |
602 Tracev((stderr,"\ngen_codes: max_code %d ", max_code)); | |
603 | |
604 for (n = 0; n <= max_code; n++) { | |
605 int len = tree[n].Len; | |
606 if (len == 0) continue; | |
607 /* Now reverse the bits */ | |
608 tree[n].Code = bi_reverse(next_code[len]++, len); | |
609 | |
610 Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ", | |
611 n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1)); | |
612 } | |
613 } | |
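/* Illustration only, not part of zlib: a standalone sketch of the canonical
 * code assignment that gen_codes performs.  The count of codes per bit
 * length fixes the first code of each length, consecutive codes of one
 * length are consecutive integers, and every code is finally bit-reversed
 * because deflate emits codes LSB first.  All demo_* names are made up.
 */
#if 0
#include <stdio.h>

static unsigned demo_bi_reverse(unsigned code, int len)
{
    unsigned res = 0;
    do { res |= code & 1; code >>= 1; res <<= 1; } while (--len > 0);
    return res >> 1;
}

int main(void)
{
    /* four symbols with bit lengths 2, 1, 3, 3 (a valid prefix code) */
    int len[4] = {2, 1, 3, 3};
    int bl_count[4] = {0};            /* codes per bit length, lengths 0..3 */
    unsigned next_code[4], code = 0, c;
    int bits, n;

    for (n = 0; n < 4; n++) bl_count[len[n]]++;
    for (bits = 1; bits <= 3; bits++) {
        code = (code + bl_count[bits - 1]) << 1;
        next_code[bits] = code;
    }
    for (n = 0; n < 4; n++) {
        c = next_code[len[n]]++;
        printf("symbol %d: len %d, code %u, reversed %u\n",
               n, len[n], c, demo_bi_reverse(c, len[n]));
    }
    return 0;
}
#endif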
614 | |
615 /* =========================================================================== | |
616 * Construct one Huffman tree and assign the code bit strings and lengths. | |
617 * Update the total bit length for the current block. | |
618 * IN assertion: the field freq is set for all tree elements. | |
619 * OUT assertions: the fields len and code are set to the optimal bit length | |
620 * and corresponding code. The length opt_len is updated; static_len is | |
621 * also updated if stree is not null. The field max_code is set. | |
622 */ | |
623 local void build_tree(s, desc) | |
624 deflate_state *s; | |
625 tree_desc *desc; /* the tree descriptor */ | |
626 { | |
627 ct_data *tree = desc->dyn_tree; | |
628 const ct_data *stree = desc->stat_desc->static_tree; | |
629 int elems = desc->stat_desc->elems; | |
630 int n, m; /* iterate over heap elements */ | |
631 int max_code = -1; /* largest code with non zero frequency */ | |
632 int node; /* new node being created */ | |
633 | |
634 /* Construct the initial heap, with least frequent element in | |
635 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. | |
636 * heap[0] is not used. | |
637 */ | |
638 s->heap_len = 0, s->heap_max = HEAP_SIZE; | |
639 | |
640 for (n = 0; n < elems; n++) { | |
641 if (tree[n].Freq != 0) { | |
642 s->heap[++(s->heap_len)] = max_code = n; | |
643 s->depth[n] = 0; | |
644 } else { | |
645 tree[n].Len = 0; | |
646 } | |
647 } | |
648 | |
649 /* The pkzip format requires that at least one distance code exists, | |
650 * and that at least one bit should be sent even if there is only one | |
651 * possible code. So to avoid special checks later on we force at least | |
652 * two codes of non zero frequency. | |
653 */ | |
654 while (s->heap_len < 2) { | |
655 node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0); | |
656 tree[node].Freq = 1; | |
657 s->depth[node] = 0; | |
658 s->opt_len--; if (stree) s->static_len -= stree[node].Len; | |
659 /* node is 0 or 1 so it does not have extra bits */ | |
660 } | |
661 desc->max_code = max_code; | |
662 | |
663 /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree; | |
664 * establish sub-heaps of increasing lengths: | |
665 */ | |
666 for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); | |
667 | |
668 /* Construct the Huffman tree by repeatedly combining the least two | |
669 * frequent nodes. | |
670 */ | |
671 node = elems; /* next internal node of the tree */ | |
672 do { | |
673 pqremove(s, tree, n); /* n = node of least frequency */ | |
674 m = s->heap[SMALLEST]; /* m = node of next least frequency */ | |
675 | |
676 s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */ | |
677 s->heap[--(s->heap_max)] = m; | |
678 | |
679 /* Create a new node father of n and m */ | |
680 tree[node].Freq = tree[n].Freq + tree[m].Freq; | |
681 s->depth[node] = (uch)((s->depth[n] >= s->depth[m] ? | |
682 s->depth[n] : s->depth[m]) + 1); | |
683 tree[n].Dad = tree[m].Dad = (ush)node; | |
684 #ifdef DUMP_BL_TREE | |
685 if (tree == s->bl_tree) { | |
686 fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)", | |
687 node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq); | |
688 } | |
689 #endif | |
690 /* and insert the new node in the heap */ | |
691 s->heap[SMALLEST] = node++; | |
692 pqdownheap(s, tree, SMALLEST); | |
693 | |
694 } while (s->heap_len >= 2); | |
695 | |
696 s->heap[--(s->heap_max)] = s->heap[SMALLEST]; | |
697 | |
698 /* At this point, the fields freq and dad are set. We can now | |
699 * generate the bit lengths. | |
700 */ | |
701 gen_bitlen(s, (tree_desc *)desc); | |
702 | |
703 /* The field len is now set, we can generate the bit codes */ | |
704 gen_codes ((ct_data *)tree, max_code, s->bl_count); | |
705 } | |
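/* Illustration only, not part of zlib: a standalone sketch of the core idea
 * in build_tree.  The two least frequent live nodes are repeatedly joined
 * under a new parent, and each leaf's code length is then read off its
 * chain of parents.  A linear scan stands in for the heap zlib really uses,
 * ties are ignored, and the demo frequencies and names are made up; the
 * freq/dad arrays mirror the Freq/Dad fields of ct_data.
 */
#if 0
#include <stdio.h>

#define LEAVES 4

int main(void)
{
    int freq [2 * LEAVES] = {45, 13, 12, 30}; /* leaf frequencies           */
    int dad  [2 * LEAVES] = {0};              /* parent index, 0 = none yet */
    int alive[2 * LEAVES] = {1, 1, 1, 1};     /* node not yet merged        */
    int nodes = LEAVES, n, k, a, b, len;

    while (1) {
        /* pick the two smallest live nodes a and b by linear scan */
        a = b = -1;
        for (n = 0; n < nodes; n++) {
            if (!alive[n]) continue;
            if (a < 0 || freq[n] < freq[a]) { b = a; a = n; }
            else if (b < 0 || freq[n] < freq[b]) b = n;
        }
        if (b < 0) break;                     /* only the root is left */
        freq[nodes] = freq[a] + freq[b];
        alive[nodes] = 1;
        alive[a] = alive[b] = 0;
        dad[a] = dad[b] = nodes++;
    }
    for (n = 0; n < LEAVES; n++) {
        for (len = 0, k = n; dad[k] != 0; k = dad[k]) len++;
        printf("leaf %d (freq %d): %d bits\n", n, freq[n], len);
    }
    return 0;
}
#endif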
706 | |
707 /* =========================================================================== | |
708 * Scan a literal or distance tree to determine the frequencies of the codes | |
709 * in the bit length tree. | |
710 */ | |
711 local void scan_tree (s, tree, max_code) | |
712 deflate_state *s; | |
713 ct_data *tree; /* the tree to be scanned */ | |
714 int max_code; /* and its largest code of non zero frequency */ | |
715 { | |
716 int n; /* iterates over all tree elements */ | |
717 int prevlen = -1; /* last emitted length */ | |
718 int curlen; /* length of current code */ | |
719 int nextlen = tree[0].Len; /* length of next code */ | |
720 int count = 0; /* repeat count of the current code */ | |
721 int max_count = 7; /* max repeat count */ | |
722 int min_count = 4; /* min repeat count */ | |
723 | |
724 if (nextlen == 0) max_count = 138, min_count = 3; | |
725 tree[max_code+1].Len = (ush)0xffff; /* guard */ | |
726 | |
727 for (n = 0; n <= max_code; n++) { | |
728 curlen = nextlen; nextlen = tree[n+1].Len; | |
729 if (++count < max_count && curlen == nextlen) { | |
730 continue; | |
731 } else if (count < min_count) { | |
732 s->bl_tree[curlen].Freq += count; | |
733 } else if (curlen != 0) { | |
734 if (curlen != prevlen) s->bl_tree[curlen].Freq++; | |
735 s->bl_tree[REP_3_6].Freq++; | |
736 } else if (count <= 10) { | |
737 s->bl_tree[REPZ_3_10].Freq++; | |
738 } else { | |
739 s->bl_tree[REPZ_11_138].Freq++; | |
740 } | |
741 count = 0; prevlen = curlen; | |
742 if (nextlen == 0) { | |
743 max_count = 138, min_count = 3; | |
744 } else if (curlen == nextlen) { | |
745 max_count = 6, min_count = 3; | |
746 } else { | |
747 max_count = 7, min_count = 4; | |
748 } | |
749 } | |
750 } | |
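/* Illustration only, not part of zlib: a standalone sketch of the run-length
 * scheme that scan_tree and send_tree apply to code lengths.  Lengths 0..15
 * stand for themselves, REP_3_6 repeats the previous length 3-6 times,
 * REPZ_3_10 encodes 3-10 zeros and REPZ_11_138 encodes 11-138 zeros.  The
 * loop mirrors send_tree's control flow but only prints the symbols it would
 * emit; the trailing -1 plays the role of the 0xffff guard that scan_tree
 * installs, and the sample lengths are made up.
 */
#if 0
#include <stdio.h>

int main(void)
{
    int len[] = {4,4,4,4,4,4,4, 0,0,0,0,0, 5, -1};  /* lengths + guard */
    int max_code = 12;
    int prevlen = -1, curlen, nextlen = len[0];
    int count = 0, max_count = 7, min_count = 4, n;

    if (nextlen == 0) max_count = 138, min_count = 3;
    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = len[n + 1];
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            do { printf("len %d\n", curlen); } while (--count != 0);
        } else if (curlen != 0) {
            if (curlen != prevlen) { printf("len %d\n", curlen); count--; }
            printf("REP_3_6 (repeat %d more)\n", count);
        } else if (count <= 10) {
            printf("REPZ_3_10 (%d zeros)\n", count);
        } else {
            printf("REPZ_11_138 (%d zeros)\n", count);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0)           max_count = 138, min_count = 3;
        else if (curlen == nextlen) max_count = 6,   min_count = 3;
        else                        max_count = 7,   min_count = 4;
    }
    return 0;
}
#endif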
751 | |
752 /* =========================================================================== | |
753 * Send a literal or distance tree in compressed form, using the codes in | |
754 * bl_tree. | |
755 */ | |
756 local void send_tree (s, tree, max_code) | |
757 deflate_state *s; | |
758 ct_data *tree; /* the tree to be scanned */ | |
759 int max_code; /* and its largest code of non zero frequency */ | |
760 { | |
761 int n; /* iterates over all tree elements */ | |
762 int prevlen = -1; /* last emitted length */ | |
763 int curlen; /* length of current code */ | |
764 int nextlen = tree[0].Len; /* length of next code */ | |
765 int count = 0; /* repeat count of the current code */ | |
766 int max_count = 7; /* max repeat count */ | |
767 int min_count = 4; /* min repeat count */ | |
768 | |
769 /* tree[max_code+1].Len = -1; */ /* guard already set */ | |
770 if (nextlen == 0) max_count = 138, min_count = 3; | |
771 | |
772 for (n = 0; n <= max_code; n++) { | |
773 curlen = nextlen; nextlen = tree[n+1].Len; | |
774 if (++count < max_count && curlen == nextlen) { | |
775 continue; | |
776 } else if (count < min_count) { | |
777 do { send_code(s, curlen, s->bl_tree); } while (--count != 0); | |
778 | |
779 } else if (curlen != 0) { | |
780 if (curlen != prevlen) { | |
781 send_code(s, curlen, s->bl_tree); count--; | |
782 } | |
783 Assert(count >= 3 && count <= 6, " 3_6?"); | |
784 send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2); | |
785 | |
786 } else if (count <= 10) { | |
787 send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3); | |
788 | |
789 } else { | |
790 send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7); | |
791 } | |
792 count = 0; prevlen = curlen; | |
793 if (nextlen == 0) { | |
794 max_count = 138, min_count = 3; | |
795 } else if (curlen == nextlen) { | |
796 max_count = 6, min_count = 3; | |
797 } else { | |
798 max_count = 7, min_count = 4; | |
799 } | |
800 } | |
801 } | |
802 | |
803 /* =========================================================================== | |
804 * Construct the Huffman tree for the bit lengths and return the index in | |
805 * bl_order of the last bit length code to send. | |
806 */ | |
807 local int build_bl_tree(s) | |
808 deflate_state *s; | |
809 { | |
810 int max_blindex; /* index of last bit length code of non zero freq */ | |
811 | |
812 /* Determine the bit length frequencies for literal and distance trees */ | |
813 scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code); | |
814 scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code); | |
815 | |
816 /* Build the bit length tree: */ | |
817 build_tree(s, (tree_desc *)(&(s->bl_desc))); | |
818 /* opt_len now includes the length of the tree representations, except | |
819 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. | |
820 */ | |
821 | |
822 /* Determine the number of bit length codes to send. The pkzip format | |
823 * requires that at least 4 bit length codes be sent. (appnote.txt says | |
824 * 3 but the actual value used is 4.) | |
825 */ | |
826 for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) { | |
827 if (s->bl_tree[bl_order[max_blindex]].Len != 0) break; | |
828 } | |
829 /* Update opt_len to include the bit length tree and counts */ | |
830 s->opt_len += 3*(max_blindex+1) + 5+5+4; | |
831 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", | |
832 s->opt_len, s->static_len)); | |
833 | |
834 return max_blindex; | |
835 } | |
836 | |
837 /* =========================================================================== | |
838 * Send the header for a block using dynamic Huffman trees: the counts, the | |
839 * lengths of the bit length codes, the literal tree and the distance tree. | |
840 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. | |
841 */ | |
842 local void send_all_trees(s, lcodes, dcodes, blcodes) | |
843 deflate_state *s; | |
844 int lcodes, dcodes, blcodes; /* number of codes for each tree */ | |
845 { | |
846 int rank; /* index in bl_order */ | |
847 | |
848 Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); | |
849 Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, | |
850 "too many codes"); | |
851 Tracev((stderr, "\nbl counts: ")); | |
852 send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */ | |
853 send_bits(s, dcodes-1, 5); | |
854 send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */ | |
855 for (rank = 0; rank < blcodes; rank++) { | |
856 Tracev((stderr, "\nbl code %2d ", bl_order[rank])); | |
857 send_bits(s, s->bl_tree[bl_order[rank]].Len, 3); | |
858 } | |
859 Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); | |
860 | |
861 send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */ | |
862 Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); | |
863 | |
864 send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */ | |
865 Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); | |
866 } | |
867 | |
868 /* =========================================================================== | |
869 * Send a stored block | |
870 */ | |
871 void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last) | |
872 deflate_state *s; | |
873 charf *buf; /* input block */ | |
874 ulg stored_len; /* length of input block */ | |
875 int last; /* one if this is the last block for a file */ | |
876 { | |
877 send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */ | |
878 #ifdef DEBUG | |
879 s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; | |
880 s->compressed_len += (stored_len + 4) << 3; | |
881 #endif | |
882 copy_block(s, buf, (unsigned)stored_len, 1); /* with header */ | |
883 } | |
884 | |
885 /* =========================================================================== | |
886 * Send one empty static block to give enough lookahead for inflate. | |
887 * This takes 10 bits, of which 7 may remain in the bit buffer. | |
888 * The current inflate code requires 9 bits of lookahead. If the | |
889 * last two codes for the previous block (real code plus EOB) were coded | |
890 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode | |
891 * the last real code. In this case we send two empty static blocks instead | |
892 * of one. (There are no problems if the previous block is stored or fixed.) | |
893 * To simplify the code, we assume the worst case of last real code encoded | |
894 * on one bit only. | |
895 */ | |
896 void ZLIB_INTERNAL _tr_align(s) | |
897 deflate_state *s; | |
898 { | |
899 send_bits(s, STATIC_TREES<<1, 3); | |
900 send_code(s, END_BLOCK, static_ltree); | |
901 #ifdef DEBUG | |
902 s->compressed_len += 10L; /* 3 for block type, 7 for EOB */ | |
903 #endif | |
904 bi_flush(s); | |
905 /* Of the 10 bits for the empty block, we have already sent | |
906 * (10 - bi_valid) bits. The lookahead for the last real code (before | |
907 * the EOB of the previous block) was thus at least one plus the length | |
908 * of the EOB plus what we have just sent of the empty static block. | |
909 */ | |
910 if (1 + s->last_eob_len + 10 - s->bi_valid < 9) { | |
911 send_bits(s, STATIC_TREES<<1, 3); | |
912 send_code(s, END_BLOCK, static_ltree); | |
913 #ifdef DEBUG | |
914 s->compressed_len += 10L; | |
915 #endif | |
916 bi_flush(s); | |
917 } | |
918 s->last_eob_len = 7; | |
919 } | |
920 | |
921 /* =========================================================================== | |
922 * Determine the best encoding for the current block: dynamic trees, static | |
923 * trees or store, and output the encoded block to the zip file. | |
924 */ | |
925 void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last) | |
926 deflate_state *s; | |
927 charf *buf; /* input block, or NULL if too old */ | |
928 ulg stored_len; /* length of input block */ | |
929 int last; /* one if this is the last block for a file */ | |
930 { | |
931 ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */ | |
932 int max_blindex = 0; /* index of last bit length code of non zero freq */ | |
933 | |
934 /* Build the Huffman trees unless a stored block is forced */ | |
935 if (s->level > 0) { | |
936 | |
937 /* Check if the file is binary or text */ | |
938 if (s->strm->data_type == Z_UNKNOWN) | |
939 s->strm->data_type = detect_data_type(s); | |
940 | |
941 /* Construct the literal and distance trees */ | |
942 build_tree(s, (tree_desc *)(&(s->l_desc))); | |
943 Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, | |
944 s->static_len)); | |
945 | |
946 build_tree(s, (tree_desc *)(&(s->d_desc))); | |
947 Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, | |
948 s->static_len)); | |
949 /* At this point, opt_len and static_len are the total bit lengths of | |
950 * the compressed block data, excluding the tree representations. | |
951 */ | |
952 | |
953 /* Build the bit length tree for the above two trees, and get the index | |
954 * in bl_order of the last bit length code to send. | |
955 */ | |
956 max_blindex = build_bl_tree(s); | |
957 | |
958 /* Determine the best encoding. Compute the block lengths in bytes. */ | |
959 opt_lenb = (s->opt_len+3+7)>>3; | |
960 static_lenb = (s->static_len+3+7)>>3; | |
961 | |
962 Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", | |
963 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, | |
964 s->last_lit)); | |
965 | |
966 if (static_lenb <= opt_lenb) opt_lenb = static_lenb; | |
967 | |
968 } else { | |
969 Assert(buf != (char*)0, "lost buf"); | |
970 opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ | |
971 } | |
972 | |
973 #ifdef FORCE_STORED | |
974 if (buf != (char*)0) { /* force stored block */ | |
975 #else | |
976 if (stored_len+4 <= opt_lenb && buf != (char*)0) { | |
977 /* 4: two words for the lengths */ | |
978 #endif | |
979 /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. | |
980 * Otherwise we can't have processed more than WSIZE input bytes since | |
981 * the last block flush, because compression would have been | |
982 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to | |
983 * transform a block into a stored block. | |
984 */ | |
985 _tr_stored_block(s, buf, stored_len, last); | |
986 | |
987 #ifdef FORCE_STATIC | |
988 } else if (static_lenb >= 0) { /* force static trees */ | |
989 #else | |
990 } else if (s->strategy == Z_FIXED || static_lenb == opt_lenb) { | |
991 #endif | |
992 send_bits(s, (STATIC_TREES<<1)+last, 3); | |
993 compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree); | |
994 #ifdef DEBUG | |
995 s->compressed_len += 3 + s->static_len; | |
996 #endif | |
997 } else { | |
998 send_bits(s, (DYN_TREES<<1)+last, 3); | |
999 send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1, | |
1000 max_blindex+1); | |
1001 compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree); | |
1002 #ifdef DEBUG | |
1003 s->compressed_len += 3 + s->opt_len; | |
1004 #endif | |
1005 } | |
1006 Assert (s->compressed_len == s->bits_sent, "bad compressed size"); | |
1007 /* The above check is made mod 2^32, for files larger than 512 MB | |
1008 * and uLong implemented on 32 bits. | |
1009 */ | |
1010 init_block(s); | |
1011 | |
1012 if (last) { | |
1013 bi_windup(s); | |
1014 #ifdef DEBUG | |
1015 s->compressed_len += 7; /* align on byte boundary */ | |
1016 #endif | |
1017 } | |
1018 Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, | |
1019 s->compressed_len-7*last)); | |
1020 } | |
1021 | |
1022 /* =========================================================================== | |
1023 * Save the match info and tally the frequency counts. Return true if | |
1024 * the current block must be flushed. | |
1025 */ | |
1026 int ZLIB_INTERNAL _tr_tally (s, dist, lc) | |
1027 deflate_state *s; | |
1028 unsigned dist; /* distance of matched string */ | |
1029 unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ | |
1030 { | |
1031 s->d_buf[s->last_lit] = (ush)dist; | |
1032 s->l_buf[s->last_lit++] = (uch)lc; | |
1033 if (dist == 0) { | |
1034 /* lc is the unmatched char */ | |
1035 s->dyn_ltree[lc].Freq++; | |
1036 } else { | |
1037 s->matches++; | |
1038 /* Here, lc is the match length - MIN_MATCH */ | |
1039 dist--; /* dist = match distance - 1 */ | |
1040 Assert((ush)dist < (ush)MAX_DIST(s) && | |
1041 (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && | |
1042 (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); | |
1043 | |
1044 s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++; | |
1045 s->dyn_dtree[d_code(dist)].Freq++; | |
1046 } | |
1047 | |
1048 #ifdef TRUNCATE_BLOCK | |
1049 /* Try to guess if it is profitable to stop the current block here */ | |
1050 if ((s->last_lit & 0x1fff) == 0 && s->level > 2) { | |
1051 /* Compute an upper bound for the compressed length */ | |
1052 ulg out_length = (ulg)s->last_lit*8L; | |
1053 ulg in_length = (ulg)((long)s->strstart - s->block_start); | |
1054 int dcode; | |
1055 for (dcode = 0; dcode < D_CODES; dcode++) { | |
1056 out_length += (ulg)s->dyn_dtree[dcode].Freq * | |
1057 (5L+extra_dbits[dcode]); | |
1058 } | |
1059 out_length >>= 3; | |
1060 Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", | |
1061 s->last_lit, in_length, out_length, | |
1062 100L - out_length*100L/in_length)); | |
1063 if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1; | |
1064 } | |
1065 #endif | |
1066 return (s->last_lit == s->lit_bufsize-1); | |
1067 /* We avoid equality with lit_bufsize because of wraparound at 64K | |
1068 * on 16 bit machines and because stored blocks are restricted to | |
1069 * 64K-1 bytes. | |
1070 */ | |
1071 } | |
1072 | |
1073 /* =========================================================================== | |
1074 * Send the block data compressed using the given Huffman trees | |
1075 */ | |
1076 local void compress_block(s, ltree, dtree) | |
1077 deflate_state *s; | |
1078 ct_data *ltree; /* literal tree */ | |
1079 ct_data *dtree; /* distance tree */ | |
1080 { | |
1081 unsigned dist; /* distance of matched string */ | |
1082 int lc; /* match length or unmatched char (if dist == 0) */ | |
1083 unsigned lx = 0; /* running index in l_buf */ | |
1084 unsigned code; /* the code to send */ | |
1085 int extra; /* number of extra bits to send */ | |
1086 | |
1087 if (s->last_lit != 0) do { | |
1088 dist = s->d_buf[lx]; | |
1089 lc = s->l_buf[lx++]; | |
1090 if (dist == 0) { | |
1091 send_code(s, lc, ltree); /* send a literal byte */ | |
1092 Tracecv(isgraph(lc), (stderr," '%c' ", lc)); | |
1093 } else { | |
1094 /* Here, lc is the match length - MIN_MATCH */ | |
1095 code = _length_code[lc]; | |
1096 send_code(s, code+LITERALS+1, ltree); /* send the length code */ | |
1097 extra = extra_lbits[code]; | |
1098 if (extra != 0) { | |
1099 lc -= base_length[code]; | |
1100 send_bits(s, lc, extra); /* send the extra length bits */ | |
1101 } | |
1102 dist--; /* dist is now the match distance - 1 */ | |
1103 code = d_code(dist); | |
1104 Assert (code < D_CODES, "bad d_code"); | |
1105 | |
1106 send_code(s, code, dtree); /* send the distance code */ | |
1107 extra = extra_dbits[code]; | |
1108 if (extra != 0) { | |
1109 dist -= base_dist[code]; | |
1110 send_bits(s, dist, extra); /* send the extra distance bits */ | |
1111 } | |
1112 } /* literal or match pair ? */ | |
1113 | |
1114 /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ | |
1115 Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, | |
1116 "pendingBuf overflow"); | |
1117 | |
1118 } while (lx < s->last_lit); | |
1119 | |
1120 send_code(s, END_BLOCK, ltree); | |
1121 s->last_eob_len = ltree[END_BLOCK].Len; | |
1122 } | |
1123 | |
1124 /* =========================================================================== | |
1125 * Check if the data type is TEXT or BINARY, using the following algorithm: | |
1126 * - TEXT if the two conditions below are satisfied: | |
1127 * a) There are no non-portable control characters belonging to the | |
1128 * "black list" (0..6, 14..25, 28..31). | |
1129 * b) There is at least one printable character belonging to the | |
1130 * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). | |
1131 * - BINARY otherwise. | |
1132 * - The following partially-portable control characters form a | |
1133 * "gray list" that is ignored in this detection algorithm: | |
1134 * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). | |
1135 * IN assertion: the fields Freq of dyn_ltree are set. | |
1136 */ | |
1137 local int detect_data_type(s) | |
1138 deflate_state *s; | |
1139 { | |
1140 /* black_mask is the bit mask of black-listed bytes | |
1141 * set bits 0..6, 14..25, and 28..31 | |
1142 * 0xf3ffc07f = binary 11110011111111111100000001111111 | |
1143 */ | |
1144 unsigned long black_mask = 0xf3ffc07fUL; | |
1145 int n; | |
1146 | |
1147 /* Check for non-textual ("black-listed") bytes. */ | |
1148 for (n = 0; n <= 31; n++, black_mask >>= 1) | |
1149 if ((black_mask & 1) && (s->dyn_ltree[n].Freq != 0)) | |
1150 return Z_BINARY; | |
1151 | |
1152 /* Check for textual ("white-listed") bytes. */ | |
1153 if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0 | |
1154 || s->dyn_ltree[13].Freq != 0) | |
1155 return Z_TEXT; | |
1156 for (n = 32; n < LITERALS; n++) | |
1157 if (s->dyn_ltree[n].Freq != 0) | |
1158 return Z_TEXT; | |
1159 | |
1160 /* There are no "black-listed" or "white-listed" bytes: | |
1161 * this stream either is empty or has tolerated ("gray-listed") bytes only. | |
1162 */ | |
1163 return Z_BINARY; | |
1164 } | |
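/* Illustration only, not part of zlib: the low 32 bits of black_mask flag the
 * control characters that force a Z_BINARY answer.  The standalone loop below
 * prints the flagged byte values and reproduces the "black list" quoted in
 * the comment above (0..6, 14..25, 28..31).
 */
#if 0
#include <stdio.h>
int main(void)
{
    unsigned long mask = 0xf3ffc07fUL;
    int n;
    for (n = 0; n <= 31; n++, mask >>= 1)
        if (mask & 1) printf("%d ", n);   /* black-listed byte value */
    printf("\n");
    return 0;
}
#endif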
1165 | |
1166 /* =========================================================================== | |
1167 * Reverse the first len bits of a code, using straightforward code (a faster | |
1168 * method would use a table) | |
1169 * IN assertion: 1 <= len <= 15 | |
1170 */ | |
1171 local unsigned bi_reverse(code, len) | |
1172 unsigned code; /* the value to invert */ | |
1173 int len; /* its bit length */ | |
1174 { | |
1175 register unsigned res = 0; | |
1176 do { | |
1177 res |= code & 1; | |
1178 code >>= 1, res <<= 1; | |
1179 } while (--len > 0); | |
1180 return res >> 1; | |
1181 } | |
1182 | |
1183 /* =========================================================================== | |
1184 * Flush the bit buffer, keeping at most 7 bits in it. | |
1185 */ | |
1186 local void bi_flush(s) | |
1187 deflate_state *s; | |
1188 { | |
1189 if (s->bi_valid == 16) { | |
1190 put_short(s, s->bi_buf); | |
1191 s->bi_buf = 0; | |
1192 s->bi_valid = 0; | |
1193 } else if (s->bi_valid >= 8) { | |
1194 put_byte(s, (Byte)s->bi_buf); | |
1195 s->bi_buf >>= 8; | |
1196 s->bi_valid -= 8; | |
1197 } | |
1198 } | |
1199 | |
1200 /* =========================================================================== | |
1201 * Flush the bit buffer and align the output on a byte boundary | |
1202 */ | |
1203 local void bi_windup(s) | |
1204 deflate_state *s; | |
1205 { | |
1206 if (s->bi_valid > 8) { | |
1207 put_short(s, s->bi_buf); | |
1208 } else if (s->bi_valid > 0) { | |
1209 put_byte(s, (Byte)s->bi_buf); | |
1210 } | |
1211 s->bi_buf = 0; | |
1212 s->bi_valid = 0; | |
1213 #ifdef DEBUG | |
1214 s->bits_sent = (s->bits_sent+7) & ~7; | |
1215 #endif | |
1216 } | |
1217 | |
1218 /* =========================================================================== | |
1219 * Copy a stored block, storing first the length and its | |
1220 * one's complement if requested. | |
1221 */ | |
1222 local void copy_block(s, buf, len, header) | |
1223 deflate_state *s; | |
1224 charf *buf; /* the input data */ | |
1225 unsigned len; /* its length */ | |
1226 int header; /* true if block header must be written */ | |
1227 { | |
1228 bi_windup(s); /* align on byte boundary */ | |
1229 s->last_eob_len = 8; /* enough lookahead for inflate */ | |
1230 | |
1231 if (header) { | |
1232 put_short(s, (ush)len); | |
1233 put_short(s, (ush)~len); | |
1234 #ifdef DEBUG | |
1235 s->bits_sent += 2*16; | |
1236 #endif | |
1237 } | |
1238 #ifdef DEBUG | |
1239 s->bits_sent += (ulg)len<<3; | |
1240 #endif | |
1241 while (len--) { | |
1242 put_byte(s, *buf++); | |
1243 } | |
1244 } |