Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(193)

Side by Side Diff: source/libvpx/vp9/encoder/vp9_bitstream.c

Issue 11555023: libvpx: Add VP9 decoder. (Closed) Base URL: svn://chrome-svn/chrome/trunk/deps/third_party/libvpx/
Patch Set: Created 8 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
Property Changes:
Added: svn:eol-style
+ LF
OLDNEW
(Empty)
1 /*
2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11
12 #include "vp9/common/vp9_header.h"
13 #include "vp9/encoder/vp9_encodemv.h"
14 #include "vp9/common/vp9_entropymode.h"
15 #include "vp9/common/vp9_findnearmv.h"
16 #include "vp9/encoder/vp9_mcomp.h"
17 #include "vp9/common/vp9_systemdependent.h"
18 #include <assert.h>
19 #include <stdio.h>
20 #include <limits.h>
21 #include "vp9/common/vp9_pragmas.h"
22 #include "vpx/vpx_encoder.h"
23 #include "vpx_mem/vpx_mem.h"
24 #include "vp9/encoder/vp9_bitstream.h"
25 #include "vp9/encoder/vp9_segmentation.h"
26
27 #include "vp9/common/vp9_seg_common.h"
28 #include "vp9/common/vp9_pred_common.h"
29 #include "vp9/common/vp9_entropy.h"
30 #include "vp9/encoder/vp9_encodemv.h"
31 #include "vp9/common/vp9_entropymv.h"
32 #include "vp9/common/vp9_mvref_common.h"
33
34 #if defined(SECTIONBITS_OUTPUT)
35 unsigned __int64 Sectionbits[500];
36 #endif
37
38 #ifdef ENTROPY_STATS
39 int intra_mode_stats[VP9_KF_BINTRAMODES]
40 [VP9_KF_BINTRAMODES]
41 [VP9_KF_BINTRAMODES];
42 unsigned int tree_update_hist [BLOCK_TYPES]
43 [COEF_BANDS]
44 [PREV_COEF_CONTEXTS]
45 [ENTROPY_NODES][2];
46 unsigned int hybrid_tree_update_hist [BLOCK_TYPES]
47 [COEF_BANDS]
48 [PREV_COEF_CONTEXTS]
49 [ENTROPY_NODES][2];
50 unsigned int tree_update_hist_8x8 [BLOCK_TYPES_8X8]
51 [COEF_BANDS]
52 [PREV_COEF_CONTEXTS]
53 [ENTROPY_NODES] [2];
54 unsigned int hybrid_tree_update_hist_8x8 [BLOCK_TYPES_8X8]
55 [COEF_BANDS]
56 [PREV_COEF_CONTEXTS]
57 [ENTROPY_NODES] [2];
58 unsigned int tree_update_hist_16x16 [BLOCK_TYPES_16X16]
59 [COEF_BANDS]
60 [PREV_COEF_CONTEXTS]
61 [ENTROPY_NODES] [2];
62 unsigned int hybrid_tree_update_hist_16x16 [BLOCK_TYPES_16X16]
63 [COEF_BANDS]
64 [PREV_COEF_CONTEXTS]
65 [ENTROPY_NODES] [2];
66
67 extern unsigned int active_section;
68 #endif
69
70 #ifdef MODE_STATS
71 int count_mb_seg[4] = { 0, 0, 0, 0 };
72 #endif
73
74 #define vp9_cost_upd ((int)(vp9_cost_one(upd) - vp9_cost_zero(upd)) >> 8)
75 #define vp9_cost_upd256 ((int)(vp9_cost_one(upd) - vp9_cost_zero(upd)))
76
77 #define SEARCH_NEWP
78 static int update_bits[255];
79
80 static void compute_update_table() {
81 int i;
82 for (i = 0; i < 255; i++)
83 update_bits[i] = vp9_count_term_subexp(i, SUBEXP_PARAM, 255);
84 }
85
// Reorder index i in [0, n) so that every modulus-th value (those congruent
// to modulus/2) is moved to the front of the index space; cheaper codes are
// then assigned to the front entries.
static int split_index(int i, int n, int modulus) {
  const int half = modulus / 2;
  // Number of values that land in the "centered" residue class.
  const int num_centered = (n - 1 - half) / modulus + 1;

  if (i % modulus == half)
    return i / modulus;

  return num_centered + i - (i + modulus - half) / modulus;
}
92
93 static int remap_prob(int v, int m) {
94 const int n = 256;
95 const int modulus = MODULUS_PARAM;
96 int i;
97 if ((m << 1) <= n)
98 i = vp9_recenter_nonneg(v, m) - 1;
99 else
100 i = vp9_recenter_nonneg(n - 1 - v, n - 1 - m) - 1;
101
102 i = split_index(i, n - 1, modulus);
103 return i;
104 }
105
106 static void write_prob_diff_update(vp9_writer *const bc,
107 vp9_prob newp, vp9_prob oldp) {
108 int delp = remap_prob(newp, oldp);
109 vp9_encode_term_subexp(bc, delp, SUBEXP_PARAM, 255);
110 }
111
112 static int prob_diff_update_cost(vp9_prob newp, vp9_prob oldp) {
113 int delp = remap_prob(newp, oldp);
114 return update_bits[delp] * 256;
115 }
116
117 static void update_mode(
118 vp9_writer *const bc,
119 int n,
120 vp9_token tok [/* n */],
121 vp9_tree tree,
122 vp9_prob Pnew [/* n-1 */],
123 vp9_prob Pcur [/* n-1 */],
124 unsigned int bct [/* n-1 */] [2],
125 const unsigned int num_events[/* n */]
126 ) {
127 unsigned int new_b = 0, old_b = 0;
128 int i = 0;
129
130 vp9_tree_probs_from_distribution(
131 n--, tok, tree,
132 Pnew, bct, num_events,
133 256, 1
134 );
135
136 do {
137 new_b += cost_branch(bct[i], Pnew[i]);
138 old_b += cost_branch(bct[i], Pcur[i]);
139 } while (++i < n);
140
141 if (new_b + (n << 8) < old_b) {
142 int i = 0;
143
144 vp9_write_bit(bc, 1);
145
146 do {
147 const vp9_prob p = Pnew[i];
148
149 vp9_write_literal(bc, Pcur[i] = p ? p : 1, 8);
150 } while (++i < n);
151 } else
152 vp9_write_bit(bc, 0);
153 }
154
// Conditionally update the intra luma (y) mode probabilities in the
// bitstream from this frame's mode counts, via update_mode().
static void update_mbintra_mode_probs(VP9_COMP* const cpi,
                                      vp9_writer* const bc) {
  VP9_COMMON *const cm = &cpi->common;

  {
    // Scratch buffers for update_mode(); sized for the regular ymode set.
    // NOTE(review): the same buffers are reused for the superblock update
    // below — assumes VP9_I32X32_MODES <= VP9_YMODES; confirm.
    vp9_prob Pnew [VP9_YMODES - 1];
    unsigned int bct [VP9_YMODES - 1] [2];

    update_mode(
      bc, VP9_YMODES, vp9_ymode_encodings, vp9_ymode_tree,
      Pnew, cm->fc.ymode_prob, bct, (unsigned int *)cpi->ymode_count
    );
#if CONFIG_SUPERBLOCKS
    update_mode(bc, VP9_I32X32_MODES, vp9_sb_ymode_encodings,
                vp9_sb_ymode_tree, Pnew, cm->fc.sb_ymode_prob, bct,
                (unsigned int *)cpi->sb_ymode_count);
#endif
  }
}
174
// Convert a count ratio num/den into an 8-bit probability.
static int get_prob(int num, int den) {
  // No observations (or invalid denominator): fall back to an even split.
  if (den <= 0)
    return 128;

  // Rounded scaling onto [0, 255], clamped to the legal probability range.
  return clip_prob((num * 255 + (den >> 1)) / den);
}
182
// Probability of the zero branch given counts for each branch outcome.
static int get_binary_prob(int n0, int n1) {
  return get_prob(n0, n0 + n1);
}
186
187 void vp9_update_skip_probs(VP9_COMP *cpi) {
188 VP9_COMMON *const pc = &cpi->common;
189 int k;
190
191 for (k = 0; k < MBSKIP_CONTEXTS; ++k) {
192 pc->mbskip_pred_probs[k] = get_binary_prob(cpi->skip_false_count[k],
193 cpi->skip_true_count[k]);
194 }
195 }
196
// Recompute and transmit the switchable interpolation filter probabilities
// for each prediction context, derived from this frame's usage counts.
static void update_switchable_interp_probs(VP9_COMP *cpi,
                                           vp9_writer* const bc) {
  VP9_COMMON *const pc = &cpi->common;
  unsigned int branch_ct[32][2];
  int i, j;
  // NOTE(review): inclusive bound (<=) iterates VP9_SWITCHABLE_FILTERS + 1
  // contexts; presumably the prob/count tables are sized accordingly —
  // confirm against their declarations.
  for (j = 0; j <= VP9_SWITCHABLE_FILTERS; ++j) {
    vp9_tree_probs_from_distribution(
      VP9_SWITCHABLE_FILTERS,
      vp9_switchable_interp_encodings, vp9_switchable_interp_tree,
      pc->fc.switchable_interp_prob[j], branch_ct,
      cpi->switchable_interp_count[j], 256, 1);
    for (i = 0; i < VP9_SWITCHABLE_FILTERS - 1; ++i) {
      // A zero probability cannot be signalled; clamp to the minimum.
      if (pc->fc.switchable_interp_prob[j][i] < 1)
        pc->fc.switchable_interp_prob[j][i] = 1;
      vp9_write_literal(bc, pc->fc.switchable_interp_prob[j][i], 8);
    }
  }
}
215
// This function updates the reference frame prediction stats.
// Key frames reset the prediction probabilities to fixed defaults; other
// frames derive per-context probabilities from the prediction counts and
// adopt them only when doing so saves at least 8 bits.
static void update_refpred_stats(VP9_COMP *cpi) {
  VP9_COMMON *const cm = &cpi->common;
  int i;
  vp9_prob new_pred_probs[PREDICTION_PROBS];
  int old_cost, new_cost;

  // Set the prediction probability structures to defaults
  if (cm->frame_type == KEY_FRAME) {
    // Set the prediction probabilities to defaults
    cm->ref_pred_probs[0] = 120;
    cm->ref_pred_probs[1] = 80;
    cm->ref_pred_probs[2] = 40;

    // No explicit probability updates are signalled on a key frame.
    vpx_memset(cpi->ref_pred_probs_update, 0,
               sizeof(cpi->ref_pred_probs_update));
  } else {
    // From the prediction counts set the probabilities for each context
    for (i = 0; i < PREDICTION_PROBS; i++) {
      new_pred_probs[i] = get_binary_prob(cpi->ref_pred_count[i][0],
                                          cpi->ref_pred_count[i][1]);

      // Decide whether or not to update the reference frame probs.
      // Returned costs are in 1/256 bit units.
      old_cost =
        (cpi->ref_pred_count[i][0] * vp9_cost_zero(cm->ref_pred_probs[i])) +
        (cpi->ref_pred_count[i][1] * vp9_cost_one(cm->ref_pred_probs[i]));

      new_cost =
        (cpi->ref_pred_count[i][0] * vp9_cost_zero(new_pred_probs[i])) +
        (cpi->ref_pred_count[i][1] * vp9_cost_one(new_pred_probs[i]));

      // Cost saving must be >= 8 bits (2048 in these units)
      if ((old_cost - new_cost) >= 2048) {
        cpi->ref_pred_probs_update[i] = 1;
        cm->ref_pred_probs[i] = new_pred_probs[i];
      } else
        cpi->ref_pred_probs_update[i] = 0;

    }
  }
}
258
// This function is called to update the mode probability context used to encode
// inter modes. It assumes the branch counts table has already been populated
// prior to the actual packing of the bitstream (in rd stage or dummy pack)
//
// The branch counts table is re-populated during the actual pack stage and in
// the decoder to facilitate backwards update of the context.
static void update_mode_probs(VP9_COMMON *cm,
                              int mode_context[INTER_MODE_CONTEXTS][4]) {
  int i, j;
  unsigned int (*mv_ref_ct)[4][2];

  // Start from the current frame-context probabilities.
  vpx_memcpy(mode_context, cm->fc.vp9_mode_contexts,
             sizeof(cm->fc.vp9_mode_contexts));

  mv_ref_ct = cm->fc.mv_ref_ct;

  for (i = 0; i < INTER_MODE_CONTEXTS; i++) {
    for (j = 0; j < 4; j++) {
      int new_prob, count, old_cost, new_cost;

      // Work out cost of coding branches with the old and optimal probability
      old_cost = cost_branch256(mv_ref_ct[i][j], mode_context[i][j]);
      count = mv_ref_ct[i][j][0] + mv_ref_ct[i][j][1];
      // Optimal zero-branch probability from the counts, clamped away from 0.
      new_prob = count > 0 ? (255 * mv_ref_ct[i][j][0]) / count : 128;
      new_prob = (new_prob > 0) ? new_prob : 1;
      new_cost = cost_branch256(mv_ref_ct[i][j], new_prob);

      // If cost saving is >= 14 bits then update the mode probability.
      // This is the approximate net cost of updating one probability given
      // that the no update case is much more common than the update case.
      if (new_cost <= (old_cost - (14 << 8))) {
        mode_context[i][j] = new_prob;
      }
    }
  }
}
// Write an intra luma mode token (inter frames).
static void write_ymode(vp9_writer *bc, int m, const vp9_prob *p) {
  write_token(bc, vp9_ymode_tree, p, vp9_ymode_encodings + m);
}
298
// Write an intra luma mode token (key frames use a separate tree).
static void kfwrite_ymode(vp9_writer *bc, int m, const vp9_prob *p) {
  write_token(bc, vp9_kf_ymode_tree, p, vp9_kf_ymode_encodings + m);
}
302
#if CONFIG_SUPERBLOCKS
// Write an intra luma mode token for a superblock (inter frames).
static void write_sb_ymode(vp9_writer *bc, int m, const vp9_prob *p) {
  write_token(bc, vp9_sb_ymode_tree, p, vp9_sb_ymode_encodings + m);
}

// Write an intra luma mode token for a superblock (key frames).
// NOTE(review): this pairs vp9_uv_mode_tree with the sb_kf_ymode encodings;
// presumably the two trees share the same shape — confirm.
static void sb_kfwrite_ymode(vp9_writer *bc, int m, const vp9_prob *p) {
  write_token(bc, vp9_uv_mode_tree, p, vp9_sb_kf_ymode_encodings + m);
}
#endif
312
// Write an 8x8 intra prediction mode token.
static void write_i8x8_mode(vp9_writer *bc, int m, const vp9_prob *p) {
  write_token(bc, vp9_i8x8_mode_tree, p, vp9_i8x8_mode_encodings + m);
}
316
// Write an intra chroma (uv) mode token.
static void write_uv_mode(vp9_writer *bc, int m, const vp9_prob *p) {
  write_token(bc, vp9_uv_mode_tree, p, vp9_uv_mode_encodings + m);
}
320
321
// Write a 4x4 block intra prediction mode token.
static void write_bmode(vp9_writer *bc, int m, const vp9_prob *p) {
#if CONFIG_NEWBINTRAMODES
  // With the new intra modes some entries are replaced; B_CONTEXT_PRED is
  // re-indexed into the gap before coding.
  assert(m < B_CONTEXT_PRED - CONTEXT_PRED_REPLACEMENTS || m == B_CONTEXT_PRED);
  if (m == B_CONTEXT_PRED) m -= CONTEXT_PRED_REPLACEMENTS;
#endif
  write_token(bc, vp9_bmode_tree, p, vp9_bmode_encodings + m);
}
329
// Write a 4x4 block intra mode token (key frames use a separate tree).
static void write_kf_bmode(vp9_writer *bc, int m, const vp9_prob *p) {
  write_token(bc, vp9_kf_bmode_tree, p, vp9_kf_bmode_encodings + m);
}
333
334 static void write_split(vp9_writer *bc, int x, const vp9_prob *p) {
335 write_token(
336 bc, vp9_mbsplit_tree, p, vp9_mbsplit_encodings + x);
337 }
338
339 static int prob_update_savings(const unsigned int *ct,
340 const vp9_prob oldp, const vp9_prob newp,
341 const vp9_prob upd) {
342 const int old_b = cost_branch256(ct, oldp);
343 const int new_b = cost_branch256(ct, newp);
344 const int update_b = 2048 + vp9_cost_upd256;
345 return (old_b - new_b - update_b);
346 }
347
348 static int prob_diff_update_savings(const unsigned int *ct,
349 const vp9_prob oldp, const vp9_prob newp,
350 const vp9_prob upd) {
351 const int old_b = cost_branch256(ct, oldp);
352 const int new_b = cost_branch256(ct, newp);
353 const int update_b = (newp == oldp ? 0 :
354 prob_diff_update_cost(newp, oldp) + vp9_cost_upd256);
355 return (old_b - new_b - update_b);
356 }
357
// Search from *bestp toward oldp for the candidate probability that
// maximizes net bit saving (coding gain minus the delta-update cost).
// Returns the best saving found (0 when no update helps) and stores the
// winning probability back into *bestp.
static int prob_diff_update_savings_search(const unsigned int *ct,
                                           const vp9_prob oldp, vp9_prob *bestp,
                                           const vp9_prob upd) {
  const int old_b = cost_branch256(ct, oldp);
  int new_b, update_b, savings, bestsavings, step;
  vp9_prob newp, bestnewp;

  bestsavings = 0;
  bestnewp = oldp;

  // Step one unit at a time toward oldp; candidates nearer the old value
  // have cheaper deltas and may net larger savings.
  step = (*bestp > oldp ? -1 : 1);
  for (newp = *bestp; newp != oldp; newp += step) {
    new_b = cost_branch256(ct, newp);
    update_b = prob_diff_update_cost(newp, oldp) + vp9_cost_upd256;
    savings = old_b - new_b - update_b;
    if (savings > bestsavings) {
      bestsavings = savings;
      bestnewp = newp;
    }
  }
  *bestp = bestnewp;
  return bestsavings;
}
381
382 static void vp9_cond_prob_update(vp9_writer *bc, vp9_prob *oldp, vp9_prob upd,
383 unsigned int *ct) {
384 vp9_prob newp;
385 int savings;
386 newp = get_binary_prob(ct[0], ct[1]);
387 savings = prob_update_savings(ct, *oldp, newp, upd);
388 if (savings > 0) {
389 vp9_write(bc, 1, upd);
390 vp9_write_literal(bc, newp, 8);
391 *oldp = newp;
392 } else {
393 vp9_write(bc, 0, upd);
394 }
395 }
396
// Arithmetic-encode a run of coefficient tokens into the boolean coder,
// advancing *tp to just past the EOSB token (or to stop). The coder state
// (count/range/lowvalue) is cached in locals for speed and written back at
// the end. The bool-encoder inner loop is hand-inlined here.
static void pack_mb_tokens(vp9_writer* const bc,
                           TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop) {
  unsigned int split;
  unsigned int shift;
  int count = bc->count;
  unsigned int range = bc->range;
  unsigned int lowvalue = bc->lowvalue;
  TOKENEXTRA *p = *tp;

  while (p < stop) {
    const int t = p->Token;
    vp9_token *const a = vp9_coef_encodings + t;
    const vp9_extra_bit_struct *const b = vp9_extra_bits + t;
    int i = 0;
    const unsigned char *pp = p->context_tree;
    int v = a->value;  // tree code bits for this token
    int n = a->Len;    // number of bits in the tree code

    // End-of-superblock marker: consume it and stop this run.
    if (t == EOSB_TOKEN)
    {
      ++p;
      break;
    }

    /* skip one or two nodes */
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // Encode the token's tree code bit by bit.
    do {
      const int bb = (v >> --n) & 1;
      split = 1 + (((range - 1) * pp[i >> 1]) >> 8);
      i = vp9_coef_tree[i + bb];

      if (bb) {
        lowvalue += split;
        range = range - split;
      } else {
        range = split;
      }

      // Renormalize and flush completed bytes to the output buffer.
      shift = vp9_norm[range];
      range <<= shift;
      count += shift;

      if (count >= 0) {
        int offset = shift - count;

        // Propagate a carry back through any trailing 0xff bytes.
        if ((lowvalue << (offset - 1)) & 0x80000000) {
          int x = bc->pos - 1;

          while (x >= 0 && bc->buffer[x] == 0xff) {
            bc->buffer[x] = (unsigned char)0;
            x--;
          }

          bc->buffer[x] += 1;
        }

        bc->buffer[bc->pos++] = (lowvalue >> (24 - offset));
        lowvalue <<= offset;
        shift = count;
        lowvalue &= 0xffffff;
        count -= 8;
      }

      lowvalue <<= shift;
    } while (n);


    // Tokens with a base value carry extra bits (magnitude plus sign).
    if (b->base_val) {
      const int e = p->Extra, L = b->Len;

      if (L) {
        // Encode the magnitude bits against the token's extra-bit probs.
        const unsigned char *pp = b->prob;
        int v = e >> 1;
        int n = L; /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          split = 1 + (((range - 1) * pp[i >> 1]) >> 8);
          i = b->tree[i + bb];

          if (bb) {
            lowvalue += split;
            range = range - split;
          } else {
            range = split;
          }

          // Renormalize and flush, as above.
          shift = vp9_norm[range];
          range <<= shift;
          count += shift;

          if (count >= 0) {
            int offset = shift - count;

            // Carry propagation through trailing 0xff bytes.
            if ((lowvalue << (offset - 1)) & 0x80000000) {
              int x = bc->pos - 1;

              while (x >= 0 && bc->buffer[x] == 0xff) {
                bc->buffer[x] = (unsigned char)0;
                x--;
              }

              bc->buffer[x] += 1;
            }

            bc->buffer[bc->pos++] = (lowvalue >> (24 - offset));
            lowvalue <<= offset;
            shift = count;
            lowvalue &= 0xffffff;
            count -= 8;
          }

          lowvalue <<= shift;
        } while (n);
      }


      {
        // Encode the sign bit with an even (1/2) probability.
        split = (range + 1) >> 1;

        if (e & 1) {
          lowvalue += split;
          range = range - split;
        } else {
          range = split;
        }

        range <<= 1;

        // Carry propagation, as above.
        if ((lowvalue & 0x80000000)) {
          int x = bc->pos - 1;

          while (x >= 0 && bc->buffer[x] == 0xff) {
            bc->buffer[x] = (unsigned char)0;
            x--;
          }

          bc->buffer[x] += 1;

        }

        lowvalue <<= 1;

        if (!++count) {
          count = -8;
          bc->buffer[bc->pos++] = (lowvalue >> 24);
          lowvalue &= 0xffffff;
        }
      }

    }
    ++p;
  }

  // Write the cached coder state back to the writer.
  bc->count = count;
  bc->lowvalue = lowvalue;
  bc->range = range;
  *tp = p;
}
563
// Store a 24-bit partition size, little-endian, at cx_data[0..2].
// Replaces the previous byte-by-byte signed-char staging, whose narrowing
// conversion of values >= 0x80 was implementation-defined.
static void write_partition_size(unsigned char *cx_data, int size) {
  int i;
  for (i = 0; i < 3; i++)
    cx_data[i] = (unsigned char)((size >> (8 * i)) & 0xff);
}
575
// Write an inter prediction mode (mv reference) token; the encoding array
// is indexed relative to NEARESTMV.
static void write_mv_ref
(
  vp9_writer *bc, MB_PREDICTION_MODE m, const vp9_prob *p
) {
#if CONFIG_DEBUG
  assert(NEARESTMV <= m && m <= SPLITMV);
#endif
  write_token(bc, vp9_mv_ref_tree, p,
              vp9_mv_ref_encoding_array - NEARESTMV + m);
}
586
#if CONFIG_SUPERBLOCKS
// Superblock variant of write_mv_ref; note the strict upper bound —
// SPLITMV is not a valid superblock mode.
static void write_sb_mv_ref(vp9_writer *bc, MB_PREDICTION_MODE m,
                            const vp9_prob *p) {
#if CONFIG_DEBUG
  assert(NEARESTMV <= m && m < SPLITMV);
#endif
  write_token(bc, vp9_sb_mv_ref_tree, p,
              vp9_sb_mv_ref_encoding_array - NEARESTMV + m);
}
#endif
597
// Write a sub-block (SPLITMV partition) prediction mode token; the
// encoding array is indexed relative to LEFT4X4.
static void write_sub_mv_ref
(
  vp9_writer *bc, B_PREDICTION_MODE m, const vp9_prob *p
) {
#if CONFIG_DEBUG
  assert(LEFT4X4 <= m && m <= NEW4X4);
#endif
  write_token(bc, vp9_sub_mv_ref_tree, p,
              vp9_sub_mv_ref_encoding_array - LEFT4X4 + m);
}
608
609 static void write_nmv(vp9_writer *bc, const MV *mv, const int_mv *ref,
610 const nmv_context *nmvc, int usehp) {
611 MV e;
612 e.row = mv->row - ref->as_mv.row;
613 e.col = mv->col - ref->as_mv.col;
614
615 vp9_encode_nmv(bc, &e, &ref->as_mv, nmvc);
616 vp9_encode_nmv_fp(bc, &e, &ref->as_mv, nmvc, usehp);
617 }
618
#if CONFIG_NEW_MVREF
// Write the index (0..3) of the chosen MV reference as a unary-style code:
// one 1 bit per level passed, terminated by a 0 bit except for index 3,
// each bit coded against its own probability.
static void vp9_write_mv_ref_id(vp9_writer *w,
                                vp9_prob * ref_id_probs,
                                int mv_ref_id) {
  // Encode the index for the MV reference.
  switch (mv_ref_id) {
    case 0:
      vp9_write(w, 0, ref_id_probs[0]);
      break;
    case 1:
      vp9_write(w, 1, ref_id_probs[0]);
      vp9_write(w, 0, ref_id_probs[1]);
      break;
    case 2:
      vp9_write(w, 1, ref_id_probs[0]);
      vp9_write(w, 1, ref_id_probs[1]);
      vp9_write(w, 0, ref_id_probs[2]);
      break;
    case 3:
      vp9_write(w, 1, ref_id_probs[0]);
      vp9_write(w, 1, ref_id_probs[1]);
      vp9_write(w, 1, ref_id_probs[2]);
      break;

    // TRAP.. This should not happen
    default:
      assert(0);
      break;
  }
}
#endif
650
// This function writes the current macro block's segment id to the bitstream.
// It should only be called if a segment map update is indicated.
static void write_mb_segid(vp9_writer *bc,
                           const MB_MODE_INFO *mi, const MACROBLOCKD *xd) {
  // Encode the MB segment id.
  int seg_id = mi->segment_id;
#if CONFIG_SUPERBLOCKS
  if (mi->encoded_as_sb) {
    // Combine the ids of the macroblocks covered by the superblock (MBs
    // beyond the frame edge are skipped).
    // NOTE(review): logical && collapses the ids to 0/1 rather than taking
    // a minimum or asserting equality — confirm this is intended.
    if (xd->mb_to_right_edge >= 0)
      seg_id = seg_id && xd->mode_info_context[1].mbmi.segment_id;
    if (xd->mb_to_bottom_edge >= 0) {
      seg_id = seg_id &&
               xd->mode_info_context[xd->mode_info_stride].mbmi.segment_id;
      if (xd->mb_to_right_edge >= 0)
        seg_id = seg_id &&
                 xd->mode_info_context[xd->mode_info_stride + 1].mbmi.segment_id;
    }
  }
#endif
  if (xd->segmentation_enabled && xd->update_mb_segmentation_map) {
    // Two-bit tree code: probs[0] splits {0,1} from {2,3}; probs[1] and
    // probs[2] split within each pair.
    switch (seg_id) {
      case 0:
        vp9_write(bc, 0, xd->mb_segment_tree_probs[0]);
        vp9_write(bc, 0, xd->mb_segment_tree_probs[1]);
        break;
      case 1:
        vp9_write(bc, 0, xd->mb_segment_tree_probs[0]);
        vp9_write(bc, 1, xd->mb_segment_tree_probs[1]);
        break;
      case 2:
        vp9_write(bc, 1, xd->mb_segment_tree_probs[0]);
        vp9_write(bc, 0, xd->mb_segment_tree_probs[2]);
        break;
      case 3:
        vp9_write(bc, 1, xd->mb_segment_tree_probs[0]);
        vp9_write(bc, 1, xd->mb_segment_tree_probs[2]);
        break;

      // TRAP.. This should not happen
      default:
        vp9_write(bc, 0, xd->mb_segment_tree_probs[0]);
        vp9_write(bc, 0, xd->mb_segment_tree_probs[1]);
        break;
    }
  }
}
697
// This function encodes the reference frame for the given macroblock.
// When the segment pins the block to a single reference, nothing is
// written; otherwise a prediction flag is coded and, on a miss, the
// reference frame is coded explicitly against context-modified probs.
static void encode_ref_frame(vp9_writer *const bc,
                             VP9_COMMON *const cm,
                             MACROBLOCKD *xd,
                             int segment_id,
                             MV_REFERENCE_FRAME rf) {
  int seg_ref_active;
  int seg_ref_count = 0;
  seg_ref_active = vp9_segfeature_active(xd,
                                         segment_id,
                                         SEG_LVL_REF_FRAME);

  // Count how many reference frames the segment allows.
  if (seg_ref_active) {
    seg_ref_count = vp9_check_segref(xd, segment_id, INTRA_FRAME) +
                    vp9_check_segref(xd, segment_id, LAST_FRAME) +
                    vp9_check_segref(xd, segment_id, GOLDEN_FRAME) +
                    vp9_check_segref(xd, segment_id, ALTREF_FRAME);
  }

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (!seg_ref_active || (seg_ref_count > 1)) {
    // Values used in prediction model coding
    unsigned char prediction_flag;
    vp9_prob pred_prob;
    MV_REFERENCE_FRAME pred_rf;

    // Get the context probability for the prediction flag
    pred_prob = vp9_get_pred_prob(cm, xd, PRED_REF);

    // Get the predicted value.
    pred_rf = vp9_get_pred_ref(cm, xd);

    // Did the chosen reference frame match its predicted value.
    prediction_flag =
      (xd->mode_info_context->mbmi.ref_frame == pred_rf);

    vp9_set_pred_flag(xd, PRED_REF, prediction_flag);
    vp9_write(bc, prediction_flag, pred_prob);

    // If not predicted correctly then code value explicitly
    if (!prediction_flag) {
      vp9_prob mod_refprobs[PREDICTION_PROBS];

      vpx_memcpy(mod_refprobs,
                 cm->mod_refprobs[pred_rf], sizeof(mod_refprobs));

      // If segment coding enabled blank out options that can't occur by
      // setting the branch probability to 0.
      if (seg_ref_active) {
        mod_refprobs[INTRA_FRAME] *=
          vp9_check_segref(xd, segment_id, INTRA_FRAME);
        mod_refprobs[LAST_FRAME] *=
          vp9_check_segref(xd, segment_id, LAST_FRAME);
        mod_refprobs[GOLDEN_FRAME] *=
          (vp9_check_segref(xd, segment_id, GOLDEN_FRAME) *
           vp9_check_segref(xd, segment_id, ALTREF_FRAME));
      }

      // Walk the intra / last / golden-vs-altref decision tree, skipping
      // any branch whose probability was zeroed out above.
      if (mod_refprobs[0]) {
        vp9_write(bc, (rf != INTRA_FRAME), mod_refprobs[0]);
      }

      // Inter coded
      if (rf != INTRA_FRAME) {
        if (mod_refprobs[1]) {
          vp9_write(bc, (rf != LAST_FRAME), mod_refprobs[1]);
        }

        if (rf != LAST_FRAME) {
          if (mod_refprobs[2]) {
            vp9_write(bc, (rf != GOLDEN_FRAME), mod_refprobs[2]);
          }
        }
      }
    }
  }

  // if using the prediction model we have nothing further to do because
  // the reference frame is fully coded by the segment
}
779
780 // Update the probabilities used to encode reference frame data
781 static void update_ref_probs(VP9_COMP *const cpi) {
782 VP9_COMMON *const cm = &cpi->common;
783
784 const int *const rfct = cpi->count_mb_ref_frame_usage;
785 const int rf_intra = rfct[INTRA_FRAME];
786 const int rf_inter = rfct[LAST_FRAME] +
787 rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
788
789 cm->prob_intra_coded = get_binary_prob(rf_intra, rf_inter);
790 cm->prob_last_coded = get_prob(rfct[LAST_FRAME], rf_inter);
791 cm->prob_gf_coded = get_binary_prob(rfct[GOLDEN_FRAME], rfct[ALTREF_FRAME]);
792
793 // Compute a modified set of probabilities to use when prediction of the
794 // reference frame fails
795 vp9_compute_mod_refprobs(cm);
796 }
797
798 static void pack_inter_mode_mvs(VP9_COMP *const cpi, vp9_writer *const bc) {
799 VP9_COMMON *const pc = &cpi->common;
800 const nmv_context *nmvc = &pc->fc.nmvc;
801 MACROBLOCKD *xd = &cpi->mb.e_mbd;
802 MODE_INFO *m;
803 MODE_INFO *prev_m;
804 TOKENEXTRA *tok = cpi->tok;
805 TOKENEXTRA *tok_end = tok + cpi->tok_count;
806
807 const int mis = pc->mode_info_stride;
808 int mb_row, mb_col;
809 int row, col;
810
811 // Values used in prediction model coding
812 vp9_prob pred_prob;
813 unsigned char prediction_flag;
814
815 int row_delta[4] = { 0, +1, 0, -1};
816 int col_delta[4] = { +1, -1, +1, +1};
817
818 cpi->mb.partition_info = cpi->mb.pi;
819
820 mb_row = 0;
821 for (row = 0; row < pc->mb_rows; row += 2) {
822 m = pc->mi + row * mis;
823 prev_m = pc->prev_mi + row * mis;
824
825 mb_col = 0;
826 for (col = 0; col < pc->mb_cols; col += 2) {
827 int i;
828
829 // Process the 4 MBs in the order:
830 // top-left, top-right, bottom-left, bottom-right
831 #if CONFIG_SUPERBLOCKS
832 vp9_write(bc, m->mbmi.encoded_as_sb, pc->sb_coded);
833 #endif
834 for (i = 0; i < 4; i++) {
835 MB_MODE_INFO *mi;
836 MV_REFERENCE_FRAME rf;
837 MV_REFERENCE_FRAME sec_ref_frame;
838 MB_PREDICTION_MODE mode;
839 int segment_id, skip_coeff;
840
841 int dy = row_delta[i];
842 int dx = col_delta[i];
843 int offset_extended = dy * mis + dx;
844
845 if ((mb_row >= pc->mb_rows) || (mb_col >= pc->mb_cols)) {
846 // MB lies outside frame, move on
847 mb_row += dy;
848 mb_col += dx;
849 m += offset_extended;
850 prev_m += offset_extended;
851 cpi->mb.partition_info += offset_extended;
852 continue;
853 }
854
855 mi = &m->mbmi;
856 rf = mi->ref_frame;
857 sec_ref_frame = mi->second_ref_frame;
858 mode = mi->mode;
859 segment_id = mi->segment_id;
860
861 // Distance of Mb to the various image edges.
862 // These specified to 8th pel as they are always compared to MV
863 // values that are in 1/8th pel units
864 xd->mb_to_left_edge = -((mb_col * 16) << 3);
865 xd->mb_to_top_edge = -((mb_row * 16)) << 3;
866
867 #if CONFIG_SUPERBLOCKS
868 if (mi->encoded_as_sb) {
869 xd->mb_to_right_edge = ((pc->mb_cols - 2 - mb_col) * 16) << 3;
870 xd->mb_to_bottom_edge = ((pc->mb_rows - 2 - mb_row) * 16) << 3;
871 } else {
872 #endif
873 xd->mb_to_right_edge = ((pc->mb_cols - 1 - mb_col) * 16) << 3;
874 xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3;
875 #if CONFIG_SUPERBLOCKS
876 }
877 #endif
878
879 // Make sure the MacroBlockD mode info pointer is set correctly
880 xd->mode_info_context = m;
881 xd->prev_mode_info_context = prev_m;
882
883 #ifdef ENTROPY_STATS
884 active_section = 9;
885 #endif
886 if (cpi->mb.e_mbd.update_mb_segmentation_map) {
887 // Is temporal coding of the segment map enabled
888 if (pc->temporal_update) {
889 prediction_flag = vp9_get_pred_flag(xd, PRED_SEG_ID);
890 pred_prob = vp9_get_pred_prob(pc, xd, PRED_SEG_ID);
891
892 // Code the segment id prediction flag for this mb
893 vp9_write(bc, prediction_flag, pred_prob);
894
895 // If the mb segment id wasn't predicted code explicitly
896 if (!prediction_flag)
897 write_mb_segid(bc, mi, &cpi->mb.e_mbd);
898 } else {
899 // Normal unpredicted coding
900 write_mb_segid(bc, mi, &cpi->mb.e_mbd);
901 }
902 }
903
904 skip_coeff = 1;
905 if (pc->mb_no_coeff_skip &&
906 (!vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) ||
907 (vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) != 0))) {
908 skip_coeff = mi->mb_skip_coeff;
909 #if CONFIG_SUPERBLOCKS
910 if (mi->encoded_as_sb) {
911 skip_coeff &= m[1].mbmi.mb_skip_coeff;
912 skip_coeff &= m[mis].mbmi.mb_skip_coeff;
913 skip_coeff &= m[mis + 1].mbmi.mb_skip_coeff;
914 }
915 #endif
916 vp9_write(bc, skip_coeff,
917 vp9_get_pred_prob(pc, xd, PRED_MBSKIP));
918 }
919
920 // Encode the reference frame.
921 if (!vp9_segfeature_active(xd, segment_id, SEG_LVL_MODE)
922 || vp9_get_segdata(xd, segment_id, SEG_LVL_MODE) >= NEARESTMV) {
923 encode_ref_frame(bc, pc, xd, segment_id, rf);
924 } else {
925 assert(rf == INTRA_FRAME);
926 }
927
928 if (rf == INTRA_FRAME) {
929 #ifdef ENTROPY_STATS
930 active_section = 6;
931 #endif
932
933 if (!vp9_segfeature_active(xd, segment_id, SEG_LVL_MODE)) {
934 #if CONFIG_SUPERBLOCKS
935 if (m->mbmi.encoded_as_sb)
936 write_sb_ymode(bc, mode, pc->fc.sb_ymode_prob);
937 else
938 #endif
939 write_ymode(bc, mode, pc->fc.ymode_prob);
940 }
941 if (mode == B_PRED) {
942 int j = 0;
943 #if CONFIG_COMP_INTRA_PRED
944 int uses_second =
945 m->bmi[0].as_mode.second !=
946 (B_PREDICTION_MODE)(B_DC_PRED - 1);
947 vp9_write(bc, uses_second, DEFAULT_COMP_INTRA_PROB);
948 #endif
949 do {
950 #if CONFIG_COMP_INTRA_PRED
951 B_PREDICTION_MODE mode2 = m->bmi[j].as_mode.second;
952 #endif
953 write_bmode(bc, m->bmi[j].as_mode.first,
954 pc->fc.bmode_prob);
955 #if CONFIG_COMP_INTRA_PRED
956 if (uses_second) {
957 write_bmode(bc, mode2, pc->fc.bmode_prob);
958 }
959 #endif
960 } while (++j < 16);
961 }
962 if (mode == I8X8_PRED) {
963 write_i8x8_mode(bc, m->bmi[0].as_mode.first,
964 pc->fc.i8x8_mode_prob);
965 write_i8x8_mode(bc, m->bmi[2].as_mode.first,
966 pc->fc.i8x8_mode_prob);
967 write_i8x8_mode(bc, m->bmi[8].as_mode.first,
968 pc->fc.i8x8_mode_prob);
969 write_i8x8_mode(bc, m->bmi[10].as_mode.first,
970 pc->fc.i8x8_mode_prob);
971 } else {
972 write_uv_mode(bc, mi->uv_mode,
973 pc->fc.uv_mode_prob[mode]);
974 }
975 } else {
976 int_mv best_mv, best_second_mv;
977
978 vp9_prob mv_ref_p [VP9_MVREFS - 1];
979
980 {
981 best_mv.as_int = mi->ref_mvs[rf][0].as_int;
982
983 vp9_mv_ref_probs(&cpi->common, mv_ref_p, mi->mb_mode_context[rf]);
984
985 #ifdef ENTROPY_STATS
986 accum_mv_refs(mode, ct);
987 #endif
988 }
989
990 #ifdef ENTROPY_STATS
991 active_section = 3;
992 #endif
993
994 // Is the segment coding of mode enabled
995 if (!vp9_segfeature_active(xd, segment_id, SEG_LVL_MODE)) {
996 #if CONFIG_SUPERBLOCKS
997 if (mi->encoded_as_sb) {
998 write_sb_mv_ref(bc, mode, mv_ref_p);
999 } else
1000 #endif
1001 {
1002 write_mv_ref(bc, mode, mv_ref_p);
1003 }
1004 vp9_accum_mv_refs(&cpi->common, mode, mi->mb_mode_context[rf]);
1005 }
1006
1007 #if CONFIG_PRED_FILTER
1008 // Is the prediction filter enabled
1009 if (mode >= NEARESTMV && mode < SPLITMV) {
1010 if (cpi->common.pred_filter_mode == 2)
1011 vp9_write(bc, mi->pred_filter_enabled,
1012 pc->prob_pred_filter_off);
1013 else
1014 assert(mi->pred_filter_enabled ==
1015 cpi->common.pred_filter_mode);
1016 }
1017 #endif
1018 if (mode >= NEARESTMV && mode <= SPLITMV)
1019 {
1020 if (cpi->common.mcomp_filter_type == SWITCHABLE) {
1021 write_token(bc, vp9_switchable_interp_tree,
1022 vp9_get_pred_probs(&cpi->common, xd,
1023 PRED_SWITCHABLE_INTERP),
1024 vp9_switchable_interp_encodings +
1025 vp9_switchable_interp_map[mi->interp_filter]);
1026 } else {
1027 assert (mi->interp_filter ==
1028 cpi->common.mcomp_filter_type);
1029 }
1030 }
1031
1032 if (mi->second_ref_frame > 0 &&
1033 (mode == NEWMV || mode == SPLITMV)) {
1034
1035 best_second_mv.as_int =
1036 mi->ref_mvs[mi->second_ref_frame][0].as_int;
1037 }
1038
1039 // does the feature use compound prediction or not
1040 // (if not specified at the frame/segment level)
1041 if (cpi->common.comp_pred_mode == HYBRID_PREDICTION) {
1042 vp9_write(bc, mi->second_ref_frame > INTRA_FRAME,
1043 vp9_get_pred_prob(pc, xd, PRED_COMP));
1044 }
1045 #if CONFIG_COMP_INTERINTRA_PRED
1046 if (cpi->common.use_interintra &&
1047 mode >= NEARESTMV && mode < SPLITMV &&
1048 mi->second_ref_frame <= INTRA_FRAME) {
1049 vp9_write(bc, mi->second_ref_frame == INTRA_FRAME,
1050 pc->fc.interintra_prob);
1051 // if (!cpi->dummy_packing)
1052 // printf("-- %d (%d)\n", mi->second_ref_frame == INTRA_FRAME,
1053 // pc->fc.interintra_prob);
1054 if (mi->second_ref_frame == INTRA_FRAME) {
1055 // if (!cpi->dummy_packing)
1056 // printf("** %d %d\n", mi->interintra_mode,
1057 // mi->interintra_uv_mode);
1058 write_ymode(bc, mi->interintra_mode, pc->fc.ymode_prob);
1059 #if SEPARATE_INTERINTRA_UV
1060 write_uv_mode(bc, mi->interintra_uv_mode,
1061 pc->fc.uv_mode_prob[mi->interintra_mode]);
1062 #endif
1063 }
1064 }
1065 #endif
1066
1067 {
1068 switch (mode) { /* new, split require MVs */
1069 case NEWMV:
1070 #ifdef ENTROPY_STATS
1071 active_section = 5;
1072 #endif
1073
1074 #if CONFIG_NEW_MVREF
1075 {
1076 unsigned int best_index;
1077
1078 // Choose the best mv reference
1079 /*
1080 best_index = pick_best_mv_ref(x, rf, mi->mv[0],
1081 mi->ref_mvs[rf], &best_mv);
1082 assert(best_index == mi->best_index);
1083 assert(best_mv.as_int == mi->best_mv.as_int);
1084 */
1085 best_index = mi->best_index;
1086 best_mv.as_int = mi->best_mv.as_int;
1087
1088 // Encode the index of the choice.
1089 vp9_write_mv_ref_id(bc,
1090 xd->mb_mv_ref_id_probs[rf], best_index);
1091
1092 cpi->best_ref_index_counts[rf][best_index]++;
1093
1094 }
1095 #endif
1096
1097 write_nmv(bc, &mi->mv[0].as_mv, &best_mv,
1098 (const nmv_context*) nmvc,
1099 xd->allow_high_precision_mv);
1100
1101 if (mi->second_ref_frame > 0) {
1102 #if CONFIG_NEW_MVREF
1103 unsigned int best_index;
1104 sec_ref_frame = mi->second_ref_frame;
1105
1106 /*
1107 best_index =
1108 pick_best_mv_ref(x, sec_ref_frame, mi->mv[1],
1109 mi->ref_mvs[sec_ref_frame],
1110 &best_second_mv);
1111 assert(best_index == mi->best_second_index);
1112 assert(best_second_mv.as_int == mi->best_second_mv.as_int);
1113 */
1114 best_index = mi->best_second_index;
1115 best_second_mv.as_int = mi->best_second_mv.as_int;
1116
1117 // Encode the index of the choice.
1118 vp9_write_mv_ref_id(bc,
1119 xd->mb_mv_ref_id_probs[sec_ref_frame],
1120 best_index);
1121
1122 cpi->best_ref_index_counts[sec_ref_frame][best_index]++;
1123 #endif
1124 write_nmv(bc, &mi->mv[1].as_mv, &best_second_mv,
1125 (const nmv_context*) nmvc,
1126 xd->allow_high_precision_mv);
1127 }
1128 break;
1129 case SPLITMV: {
1130 int j = 0;
1131
1132 #ifdef MODE_STATS
1133 ++count_mb_seg [mi->partitioning];
1134 #endif
1135
1136 write_split(bc, mi->partitioning, cpi->common.fc.mbsplit_prob);
1137 cpi->mbsplit_count[mi->partitioning]++;
1138
1139 do {
1140 B_PREDICTION_MODE blockmode;
1141 int_mv blockmv;
1142 const int *const L =
1143 vp9_mbsplits [mi->partitioning];
1144 int k = -1; /* first block in subset j */
1145 int mv_contz;
1146 int_mv leftmv, abovemv;
1147
1148 blockmode = cpi->mb.partition_info->bmi[j].mode;
1149 blockmv = cpi->mb.partition_info->bmi[j].mv;
1150 #if CONFIG_DEBUG
1151 while (j != L[++k])
1152 if (k >= 16)
1153 assert(0);
1154 #else
1155 while (j != L[++k]);
1156 #endif
1157 leftmv.as_int = left_block_mv(m, k);
1158 abovemv.as_int = above_block_mv(m, k, mis);
1159 mv_contz = vp9_mv_cont(&leftmv, &abovemv);
1160
1161 write_sub_mv_ref(bc, blockmode,
1162 cpi->common.fc.sub_mv_ref_prob [mv_contz]);
1163 cpi->sub_mv_ref_count[mv_contz][blockmode - LEFT4X4]++;
1164 if (blockmode == NEW4X4) {
1165 #ifdef ENTROPY_STATS
1166 active_section = 11;
1167 #endif
1168 write_nmv(bc, &blockmv.as_mv, &best_mv,
1169 (const nmv_context*) nmvc,
1170 xd->allow_high_precision_mv);
1171
1172 if (mi->second_ref_frame > 0) {
1173 write_nmv(bc,
1174 &cpi->mb.partition_info->bmi[j].second_mv.as_mv,
1175 &best_second_mv,
1176 (const nmv_context*) nmvc,
1177 xd->allow_high_precision_mv);
1178 }
1179 }
1180 } while (++j < cpi->mb.partition_info->count);
1181 }
1182 break;
1183 default:
1184 break;
1185 }
1186 }
1187 /* This is not required if the counts in cpi are consistent with the
1188 * final packing pass */
1189 // if (!cpi->dummy_packing)
1190 // vp9_update_nmv_count(cpi, x, &best_mv, &best_second_mv);
1191 }
1192
1193 if (((rf == INTRA_FRAME && mode <= I8X8_PRED) ||
1194 (rf != INTRA_FRAME && !(mode == SPLITMV &&
1195 mi->partitioning == PARTITIONING_4X4))) &&
1196 pc->txfm_mode == TX_MODE_SELECT &&
1197 !((pc->mb_no_coeff_skip && skip_coeff) ||
1198 (vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) &&
1199 vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) == 0))) {
1200 TX_SIZE sz = mi->txfm_size;
1201 // FIXME(rbultje) code ternary symbol once all experiments are merged
1202 vp9_write(bc, sz != TX_4X4, pc->prob_tx[0]);
1203 if (sz != TX_4X4 && mode != I8X8_PRED && mode != SPLITMV)
1204 vp9_write(bc, sz != TX_8X8, pc->prob_tx[1]);
1205 }
1206
1207 #ifdef ENTROPY_STATS
1208 active_section = 1;
1209 #endif
1210 assert(tok < tok_end);
1211 pack_mb_tokens(bc, &tok, tok_end);
1212
1213 #if CONFIG_SUPERBLOCKS
1214 if (m->mbmi.encoded_as_sb) {
1215 assert(!i);
1216 mb_col += 2;
1217 m += 2;
1218 cpi->mb.partition_info += 2;
1219 prev_m += 2;
1220 break;
1221 }
1222 #endif
1223
1224 // Next MB
1225 mb_row += dy;
1226 mb_col += dx;
1227 m += offset_extended;
1228 prev_m += offset_extended;
1229 cpi->mb.partition_info += offset_extended;
1230 #if CONFIG_DEBUG
1231 assert((prev_m - cpi->common.prev_mip) == (m - cpi->common.mip));
1232 assert((prev_m - cpi->common.prev_mi) == (m - cpi->common.mi));
1233 #endif
1234 }
1235 }
1236
1237 // Next SB
1238 mb_row += 2;
1239 m += mis + (1 - (pc->mb_cols & 0x1));
1240 prev_m += mis + (1 - (pc->mb_cols & 0x1));
1241 cpi->mb.partition_info += mis + (1 - (pc->mb_cols & 0x1));
1242 }
1243 }
1244
1245
// Writes the mode information for one key-frame macroblock (or superblock)
// to the bitstream.  The write order must exactly mirror the decoder's read
// order: segment id, skip flag, Y mode, sub-block / I8X8 modes, UV mode,
// and finally the per-MB transform size.
static void write_mb_modes_kf(const VP9_COMMON *c,
                              const MACROBLOCKD *xd,
                              const MODE_INFO *m,
                              int mode_info_stride,
                              vp9_writer *const bc) {
  int ym;          // luma (Y) prediction mode
  int segment_id;

  ym = m->mbmi.mode;
  segment_id = m->mbmi.segment_id;

  // The segment id is only coded while the segmentation map is being updated.
  if (xd->update_mb_segmentation_map) {
    write_mb_segid(bc, &m->mbmi, xd);
  }

  // Code the skip-coefficients flag, unless the segment-level EOB feature
  // is active with a zero EOB (in which case skipping is implied).
  if (c->mb_no_coeff_skip &&
      (!vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) ||
       (vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) != 0))) {
    int skip_coeff = m->mbmi.mb_skip_coeff;
#if CONFIG_SUPERBLOCKS
    const int mis = mode_info_stride;
    // A superblock is only "skipped" if all four of its macroblocks skip.
    if (m->mbmi.encoded_as_sb) {
      skip_coeff &= m[1].mbmi.mb_skip_coeff;
      skip_coeff &= m[mis].mbmi.mb_skip_coeff;
      skip_coeff &= m[mis + 1].mbmi.mb_skip_coeff;
    }
#endif
    vp9_write(bc, skip_coeff,
              vp9_get_pred_prob(c, xd, PRED_MBSKIP));
  }

  // Luma mode, using the key-frame (superblock or macroblock) mode trees.
#if CONFIG_SUPERBLOCKS
  if (m->mbmi.encoded_as_sb) {
    sb_kfwrite_ymode(bc, ym,
                     c->sb_kf_ymode_prob[c->kf_ymode_probs_index]);
  } else
#endif
  {
    kfwrite_ymode(bc, ym,
                  c->kf_ymode_prob[c->kf_ymode_probs_index]);
  }

  if (ym == B_PRED) {
    // 4x4 intra: code one mode per sub-block, contexted on the above (A)
    // and left (L) sub-block modes.
    const int mis = c->mode_info_stride;
    int i = 0;
#if CONFIG_COMP_INTRA_PRED
    // A "second" mode of (B_DC_PRED - 1) marks compound intra as unused.
    int uses_second =
      m->bmi[0].as_mode.second !=
      (B_PREDICTION_MODE)(B_DC_PRED - 1);
    vp9_write(bc, uses_second, DEFAULT_COMP_INTRA_PROB);
#endif
    do {
      const B_PREDICTION_MODE A = above_block_mode(m, i, mis);
      const B_PREDICTION_MODE L = left_block_mode(m, i);
      const int bm = m->bmi[i].as_mode.first;
#if CONFIG_COMP_INTRA_PRED
      const int bm2 = m->bmi[i].as_mode.second;
#endif

#ifdef ENTROPY_STATS
      ++intra_mode_stats [A] [L] [bm];
#endif

      write_kf_bmode(bc, bm, c->kf_bmode_prob[A][L]);
#if CONFIG_COMP_INTRA_PRED
      if (uses_second) {
        write_kf_bmode(bc, bm2, c->kf_bmode_prob[A][L]);
      }
#endif
    } while (++i < 16);
  }
  if (ym == I8X8_PRED) {
    // 8x8 intra: one mode per 8x8 quadrant (sub-block indices 0, 2, 8, 10).
    // No separate UV mode is coded in this case.
    write_i8x8_mode(bc, m->bmi[0].as_mode.first,
                    c->fc.i8x8_mode_prob);
    // printf(" mode: %d\n", m->bmi[0].as_mode.first); fflush(stdout);
    write_i8x8_mode(bc, m->bmi[2].as_mode.first,
                    c->fc.i8x8_mode_prob);
    // printf(" mode: %d\n", m->bmi[2].as_mode.first); fflush(stdout);
    write_i8x8_mode(bc, m->bmi[8].as_mode.first,
                    c->fc.i8x8_mode_prob);
    // printf(" mode: %d\n", m->bmi[8].as_mode.first); fflush(stdout);
    write_i8x8_mode(bc, m->bmi[10].as_mode.first,
                    c->fc.i8x8_mode_prob);
    // printf(" mode: %d\n", m->bmi[10].as_mode.first); fflush(stdout);
  } else
    write_uv_mode(bc, m->mbmi.uv_mode, c->kf_uv_mode_prob[ym]);

  // Transform size selection: only when the frame allows per-MB selection
  // and the block is not implied to have zero coefficients.
  if (ym <= I8X8_PRED && c->txfm_mode == TX_MODE_SELECT &&
      !((c->mb_no_coeff_skip && m->mbmi.mb_skip_coeff) ||
        (vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) &&
         vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) == 0))) {
    TX_SIZE sz = m->mbmi.txfm_size;
    // FIXME(rbultje) code ternary symbol once all experiments are merged
    vp9_write(bc, sz != TX_4X4, c->prob_tx[0]);
    if (sz != TX_4X4 && ym <= TM_PRED)
      vp9_write(bc, sz != TX_8X8, c->prob_tx[1]);
  }
}
1344
// Key-frame mode packing: walks the frame in superblock (2x2 macroblock)
// order and, for each macroblock, writes its mode info followed by its
// coefficient tokens.  Mirrors the inter-frame packing loop but uses the
// key-frame mode coder (write_mb_modes_kf).
static void write_kfmodes(VP9_COMP* const cpi, vp9_writer* const bc) {
  VP9_COMMON *const c = &cpi->common;
  const int mis = c->mode_info_stride;
  MACROBLOCKD *xd = &cpi->mb.e_mbd;
  MODE_INFO *m;
  int i;
  int row, col;
  int mb_row, mb_col;
  // Zig-zag deltas that visit the 4 MBs of a 2x2 group in the order:
  // top-left, top-right, bottom-left, bottom-right.
  int row_delta[4] = { 0, +1, 0, -1};
  int col_delta[4] = { +1, -1, +1, +1};
  TOKENEXTRA *tok = cpi->tok;
  TOKENEXTRA *tok_end = tok + cpi->tok_count;

  mb_row = 0;
  for (row = 0; row < c->mb_rows; row += 2) {
    // Re-anchor the mode-info pointer at the start of each MB row pair.
    m = c->mi + row * mis;

    mb_col = 0;
    for (col = 0; col < c->mb_cols; col += 2) {
#if CONFIG_SUPERBLOCKS
      // Signal whether this 2x2 group is coded as a single superblock.
      vp9_write(bc, m->mbmi.encoded_as_sb, c->sb_coded);
#endif
      // Process the 4 MBs in the order:
      // top-left, top-right, bottom-left, bottom-right
      for (i = 0; i < 4; i++) {
        int dy = row_delta[i];
        int dx = col_delta[i];
        int offset_extended = dy * mis + dx;

        if ((mb_row >= c->mb_rows) || (mb_col >= c->mb_cols)) {
          // MB lies outside frame, move on
          mb_row += dy;
          mb_col += dx;
          m += offset_extended;
          continue;
        }

        // Make sure the MacroBlockD mode info pointer is set correctly
        xd->mode_info_context = m;

        write_mb_modes_kf(c, xd, m, mis, bc);
#ifdef ENTROPY_STATS
        active_section = 8;
#endif
        assert(tok < tok_end);
        pack_mb_tokens(bc, &tok, tok_end);

#if CONFIG_SUPERBLOCKS
        if (m->mbmi.encoded_as_sb) {
          // A superblock covers the whole 2x2 group; it is only signalled
          // on the top-left MB (i == 0), so skip the other three.
          assert(!i);
          mb_col += 2;
          m += 2;
          break;
        }
#endif
        // Next MB
        mb_row += dy;
        mb_col += dx;
        m += offset_extended;
      }
    }
    mb_row += 2;
  }
}
1409
1410
1411 /* This function is used for debugging probability trees. */
1412 static void print_prob_tree(vp9_prob
1413 coef_probs[BLOCK_TYPES][COEF_BANDS][PREV_COEF_CONTEX TS][ENTROPY_NODES]) {
1414 /* print coef probability tree */
1415 int i, j, k, l;
1416 FILE *f = fopen("enc_tree_probs.txt", "a");
1417 fprintf(f, "{\n");
1418 for (i = 0; i < BLOCK_TYPES; i++) {
1419 fprintf(f, " {\n");
1420 for (j = 0; j < COEF_BANDS; j++) {
1421 fprintf(f, " {\n");
1422 for (k = 0; k < PREV_COEF_CONTEXTS; k++) {
1423 fprintf(f, " {");
1424 for (l = 0; l < ENTROPY_NODES; l++) {
1425 fprintf(f, "%3u, ",
1426 (unsigned int)(coef_probs [i][j][k][l]));
1427 }
1428 fprintf(f, " }\n");
1429 }
1430 fprintf(f, " }\n");
1431 }
1432 fprintf(f, " }\n");
1433 }
1434 fprintf(f, "}\n");
1435 fclose(f);
1436 }
1437
// Builds the per-frame coefficient probability tables (and branch counts)
// from the token counts collected during the encode loop, for every
// transform size (4x4 / 8x8 / 16x16) and both the DCT and hybrid
// (ADST/DCT) token sets.  The results feed the probability-update
// decisions in update_coef_probs_common().
static void build_coeff_contexts(VP9_COMP *cpi) {
  int i = 0, j, k;
#ifdef ENTROPY_STATS
  int t = 0;
#endif
  // 4x4 DCT tokens.
  for (i = 0; i < BLOCK_TYPES; ++i) {
    for (j = 0; j < COEF_BANDS; ++j) {
      for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
        // Contexts k >= 3 are unused in the first coded band of each block
        // type (band 1 for type 0, band 0 otherwise) — skip them.
        if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
          continue;
        vp9_tree_probs_from_distribution(
          MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
          cpi->frame_coef_probs [i][j][k],
          cpi->frame_branch_ct [i][j][k],
          cpi->coef_counts [i][j][k],
          256, 1
        );
#ifdef ENTROPY_STATS
        if (!cpi->dummy_packing)
          for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
            context_counters[i][j][k][t] += cpi->coef_counts[i][j][k][t];
#endif
      }
    }
  }
  // 4x4 hybrid (ADST/DCT) tokens.
  for (i = 0; i < BLOCK_TYPES; ++i) {
    for (j = 0; j < COEF_BANDS; ++j) {
      for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
        if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
          continue;
        vp9_tree_probs_from_distribution(
          MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
          cpi->frame_hybrid_coef_probs [i][j][k],
          cpi->frame_hybrid_branch_ct [i][j][k],
          cpi->hybrid_coef_counts [i][j][k],
          256, 1
        );
#ifdef ENTROPY_STATS
        if (!cpi->dummy_packing)
          for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
            hybrid_context_counters[i][j][k][t] += cpi->hybrid_coef_counts[i][j][k][t];
#endif
      }
    }
  }

  // 8x8 tables are only needed when larger-than-4x4 transforms are allowed.
  if (cpi->common.txfm_mode != ONLY_4X4) {
    // 8x8 DCT tokens.
    for (i = 0; i < BLOCK_TYPES_8X8; ++i) {
      for (j = 0; j < COEF_BANDS; ++j) {
        for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
          /* at every context */
          /* calc probs and branch cts for this frame only */
          // vp9_prob new_p [ENTROPY_NODES];
          // unsigned int branch_ct [ENTROPY_NODES] [2];
          if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
            continue;
          vp9_tree_probs_from_distribution(
            MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
            cpi->frame_coef_probs_8x8 [i][j][k],
            cpi->frame_branch_ct_8x8 [i][j][k],
            cpi->coef_counts_8x8 [i][j][k],
            256, 1
          );
#ifdef ENTROPY_STATS
          if (!cpi->dummy_packing)
            for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
              context_counters_8x8[i][j][k][t] += cpi->coef_counts_8x8[i][j][k][t];
#endif
        }
      }
    }
    // 8x8 hybrid tokens.
    for (i = 0; i < BLOCK_TYPES_8X8; ++i) {
      for (j = 0; j < COEF_BANDS; ++j) {
        for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
          /* at every context */
          /* calc probs and branch cts for this frame only */
          // vp9_prob new_p [ENTROPY_NODES];
          // unsigned int branch_ct [ENTROPY_NODES] [2];
          if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
            continue;
          vp9_tree_probs_from_distribution(
            MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
            cpi->frame_hybrid_coef_probs_8x8 [i][j][k],
            cpi->frame_hybrid_branch_ct_8x8 [i][j][k],
            cpi->hybrid_coef_counts_8x8 [i][j][k],
            256, 1
          );
#ifdef ENTROPY_STATS
          if (!cpi->dummy_packing)
            for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
              hybrid_context_counters_8x8[i][j][k][t] += cpi->hybrid_coef_counts_8x8[i][j][k][t];
#endif
        }
      }
    }
  }

  // 16x16 DCT tokens — only when 16x16 transforms are allowed.
  if (cpi->common.txfm_mode > ALLOW_8X8) {
    for (i = 0; i < BLOCK_TYPES_16X16; ++i) {
      for (j = 0; j < COEF_BANDS; ++j) {
        for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
          if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
            continue;
          vp9_tree_probs_from_distribution(
            MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
            cpi->frame_coef_probs_16x16[i][j][k],
            cpi->frame_branch_ct_16x16[i][j][k],
            cpi->coef_counts_16x16[i][j][k], 256, 1);
#ifdef ENTROPY_STATS
          if (!cpi->dummy_packing)
            for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
              context_counters_16x16[i][j][k][t] += cpi->coef_counts_16x16[i][j][k][t];
#endif
        }
      }
    }
  }
  // 16x16 hybrid tokens.
  // NOTE(review): unlike its DCT counterpart just above, this loop is NOT
  // guarded by (txfm_mode > ALLOW_8X8).  It appears to be wasted work when
  // 16x16 transforms are disabled (update_coef_probs() only consumes the
  // 16x16 tables under that guard) — confirm whether the guard was
  // intentionally omitted.
  for (i = 0; i < BLOCK_TYPES_16X16; ++i) {
    for (j = 0; j < COEF_BANDS; ++j) {
      for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
        if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
          continue;
        vp9_tree_probs_from_distribution(
          MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
          cpi->frame_hybrid_coef_probs_16x16[i][j][k],
          cpi->frame_hybrid_branch_ct_16x16[i][j][k],
          cpi->hybrid_coef_counts_16x16[i][j][k], 256, 1);
#ifdef ENTROPY_STATS
        if (!cpi->dummy_packing)
          for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
            hybrid_context_counters_16x16[i][j][k][t] += cpi->hybrid_coef_counts_16x16[i][j][k][t];
#endif
      }
    }
  }
}
1574
// Decides whether to signal coefficient-probability updates for one table
// and, when worthwhile, writes them to the bitstream.  Two passes:
//   1) a dry run that totals the bit savings across all candidate node
//      updates (each update flag itself costs vp9_cost_zero(upd) bits);
//   2) if any update is wanted and the net savings are positive, a second
//      pass that codes a per-node update flag (probability COEF_UPDATE_PROB)
//      plus the new probability as a delta, committing accepted values into
//      old_frame_coef_probs so encoder and decoder state stay in sync.
// Both passes must make identical accept/reject decisions.
static void update_coef_probs_common(
    vp9_writer* const bc,
    vp9_prob new_frame_coef_probs[BLOCK_TYPES][COEF_BANDS]
                                 [PREV_COEF_CONTEXTS][ENTROPY_NODES],
    vp9_prob old_frame_coef_probs[BLOCK_TYPES][COEF_BANDS]
                                 [PREV_COEF_CONTEXTS][ENTROPY_NODES],
    unsigned int frame_branch_ct[BLOCK_TYPES][COEF_BANDS]
                                [PREV_COEF_CONTEXTS][ENTROPY_NODES][2]) {
  int i, j, k, t;
  int update[2] = {0, 0};  // update[1] counts nodes worth updating
  int savings;
  // vp9_prob bestupd = find_coef_update_prob(cpi);

  /* dry run to see if there is any update at all needed */
  savings = 0;
  // Band 0 is only coded for block type 0, hence j starts at !i.
  for (i = 0; i < BLOCK_TYPES; ++i) {
    for (j = !i; j < COEF_BANDS; ++j) {
      int prev_coef_savings[ENTROPY_NODES] = {0};
      for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
        for (t = 0; t < ENTROPY_NODES; ++t) {
          vp9_prob newp = new_frame_coef_probs[i][j][k][t];
          const vp9_prob oldp = old_frame_coef_probs[i][j][k][t];
          const vp9_prob upd = COEF_UPDATE_PROB;
          int s = prev_coef_savings[t];
          int u = 0;
          // Skip contexts that are never coded (see build_coeff_contexts).
          if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
            continue;
#if defined(SEARCH_NEWP)
          s = prob_diff_update_savings_search(
                frame_branch_ct[i][j][k][t],
                oldp, &newp, upd);
          if (s > 0 && newp != oldp)
            u = 1;
          // Every node pays for its update flag whether or not it updates.
          if (u)
            savings += s - (int)(vp9_cost_zero(upd));
          else
            savings -= (int)(vp9_cost_zero(upd));
#else
          s = prob_update_savings(
                frame_branch_ct[i][j][k][t],
                oldp, newp, upd);
          if (s > 0)
            u = 1;
          if (u)
            savings += s;
#endif

          update[u]++;
        }
      }
    }
  }

  // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
  /* Is coef updated at all */
  if (update[1] == 0 || savings < 0) {
    // No worthwhile updates: a single zero bit covers the whole table.
    vp9_write_bit(bc, 0);
  } else {
    vp9_write_bit(bc, 1);
    // Second pass: must reproduce the dry run's decisions exactly.
    for (i = 0; i < BLOCK_TYPES; ++i) {
      for (j = !i; j < COEF_BANDS; ++j) {
        int prev_coef_savings[ENTROPY_NODES] = {0};
        for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
          // calc probs and branch cts for this frame only
          for (t = 0; t < ENTROPY_NODES; ++t) {
            vp9_prob newp = new_frame_coef_probs[i][j][k][t];
            vp9_prob *oldp = old_frame_coef_probs[i][j][k] + t;
            const vp9_prob upd = COEF_UPDATE_PROB;
            int s = prev_coef_savings[t];
            int u = 0;
            if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
              continue;

#if defined(SEARCH_NEWP)
            s = prob_diff_update_savings_search(
                  frame_branch_ct[i][j][k][t],
                  *oldp, &newp, upd);
            if (s > 0 && newp != *oldp)
              u = 1;
#else
            s = prob_update_savings(
                  frame_branch_ct[i][j][k][t],
                  *oldp, newp, upd);
            if (s > 0)
              u = 1;
#endif
            vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
            // NOTE(review): cpi is not a parameter of this function, so this
            // ENTROPY_STATS branch cannot compile as-is — verify before
            // building with ENTROPY_STATS defined.
            if (!cpi->dummy_packing)
              ++ tree_update_hist [i][j][k][t] [u];
#endif
            if (u) {
              /* send/use new probability */
              write_prob_diff_update(bc, newp, *oldp);
              *oldp = newp;
            }
          }
        }
      }
    }
  }
}
1677
// Writes all coefficient-probability updates for the frame.  Builds the
// per-frame tables from the encode-loop counts, then signals updates for
// each table in a fixed order (4x4, then 8x8, then 16x16; DCT before
// hybrid) — this order is part of the bitstream and must match the decoder.
static void update_coef_probs(VP9_COMP* const cpi, vp9_writer* const bc) {
  vp9_clear_system_state();

  // Build the coefficient contexts based on counts collected in encode loop
  build_coeff_contexts(cpi);

  update_coef_probs_common(bc,
                           cpi->frame_coef_probs,
                           cpi->common.fc.coef_probs,
                           cpi->frame_branch_ct);

  update_coef_probs_common(bc,
                           cpi->frame_hybrid_coef_probs,
                           cpi->common.fc.hybrid_coef_probs,
                           cpi->frame_hybrid_branch_ct);

  /* do not do this if not even allowed */
  if (cpi->common.txfm_mode != ONLY_4X4) {
    update_coef_probs_common(bc,
                             cpi->frame_coef_probs_8x8,
                             cpi->common.fc.coef_probs_8x8,
                             cpi->frame_branch_ct_8x8);

    update_coef_probs_common(bc,
                             cpi->frame_hybrid_coef_probs_8x8,
                             cpi->common.fc.hybrid_coef_probs_8x8,
                             cpi->frame_hybrid_branch_ct_8x8);
  }

  // 16x16 tables are only present in the stream when 16x16 transforms
  // are allowed.
  if (cpi->common.txfm_mode > ALLOW_8X8) {
    update_coef_probs_common(bc,
                             cpi->frame_coef_probs_16x16,
                             cpi->common.fc.coef_probs_16x16,
                             cpi->frame_branch_ct_16x16);
    update_coef_probs_common(bc,
                             cpi->frame_hybrid_coef_probs_16x16,
                             cpi->common.fc.hybrid_coef_probs_16x16,
                             cpi->frame_hybrid_branch_ct_16x16);
  }
}
1718
1719 #ifdef PACKET_TESTING
1720 FILE *vpxlogc = 0;
1721 #endif
1722
1723 static void put_delta_q(vp9_writer *bc, int delta_q) {
1724 if (delta_q != 0) {
1725 vp9_write_bit(bc, 1);
1726 vp9_write_literal(bc, abs(delta_q), 4);
1727
1728 if (delta_q < 0)
1729 vp9_write_bit(bc, 1);
1730 else
1731 vp9_write_bit(bc, 0);
1732 } else
1733 vp9_write_bit(bc, 0);
1734 }
1735
1736 static void decide_kf_ymode_entropy(VP9_COMP *cpi) {
1737
1738 int mode_cost[MB_MODE_COUNT];
1739 int cost;
1740 int bestcost = INT_MAX;
1741 int bestindex = 0;
1742 int i, j;
1743
1744 for (i = 0; i < 8; i++) {
1745 vp9_cost_tokens(mode_cost, cpi->common.kf_ymode_prob[i], vp9_kf_ymode_tree);
1746 cost = 0;
1747 for (j = 0; j < VP9_YMODES; j++) {
1748 cost += mode_cost[j] * cpi->ymode_count[j];
1749 }
1750 #if CONFIG_SUPERBLOCKS
1751 vp9_cost_tokens(mode_cost, cpi->common.sb_kf_ymode_prob[i],
1752 vp9_sb_ymode_tree);
1753 for (j = 0; j < VP9_I32X32_MODES; j++) {
1754 cost += mode_cost[j] * cpi->sb_ymode_count[j];
1755 }
1756 #endif
1757 if (cost < bestcost) {
1758 bestindex = i;
1759 bestcost = cost;
1760 }
1761 }
1762 cpi->common.kf_ymode_probs_index = bestindex;
1763
1764 }
1765 static void segment_reference_frames(VP9_COMP *cpi) {
1766 VP9_COMMON *oci = &cpi->common;
1767 MODE_INFO *mi = oci->mi;
1768 int ref[MAX_MB_SEGMENTS] = {0};
1769 int i, j;
1770 int mb_index = 0;
1771 MACROBLOCKD *const xd = &cpi->mb.e_mbd;
1772
1773 for (i = 0; i < oci->mb_rows; i++) {
1774 for (j = 0; j < oci->mb_cols; j++, mb_index++) {
1775 ref[mi[mb_index].mbmi.segment_id] |= (1 << mi[mb_index].mbmi.ref_frame);
1776 }
1777 mb_index++;
1778 }
1779 for (i = 0; i < MAX_MB_SEGMENTS; i++) {
1780 vp9_enable_segfeature(xd, i, SEG_LVL_REF_FRAME);
1781 vp9_set_segdata(xd, i, SEG_LVL_REF_FRAME, ref[i]);
1782 }
1783 }
1784
1785 void vp9_pack_bitstream(VP9_COMP *cpi, unsigned char *dest,
1786 unsigned long *size) {
1787 int i, j;
1788 VP9_HEADER oh;
1789 VP9_COMMON *const pc = &cpi->common;
1790 vp9_writer header_bc, residual_bc;
1791 MACROBLOCKD *const xd = &cpi->mb.e_mbd;
1792 int extra_bytes_packed = 0;
1793
1794 unsigned char *cx_data = dest;
1795
1796 oh.show_frame = (int) pc->show_frame;
1797 oh.type = (int)pc->frame_type;
1798 oh.version = pc->version;
1799 oh.first_partition_length_in_bytes = 0;
1800
1801 cx_data += 3;
1802
1803 #if defined(SECTIONBITS_OUTPUT)
1804 Sectionbits[active_section = 1] += sizeof(VP9_HEADER) * 8 * 256;
1805 #endif
1806
1807 compute_update_table();
1808
1809 /* vp9_kf_default_bmode_probs() is called in vp9_setup_key_frame() once
1810 * for each K frame before encode frame. pc->kf_bmode_prob doesn't get
1811 * changed anywhere else. No need to call it again here. --yw
1812 * vp9_kf_default_bmode_probs( pc->kf_bmode_prob);
1813 */
1814
1815 /* every keyframe send startcode, width, height, scale factor, clamp
1816 * and color type.
1817 */
1818 if (oh.type == KEY_FRAME) {
1819 int v;
1820
1821 // Start / synch code
1822 cx_data[0] = 0x9D;
1823 cx_data[1] = 0x01;
1824 cx_data[2] = 0x2a;
1825
1826 v = (pc->horiz_scale << 14) | pc->Width;
1827 cx_data[3] = v;
1828 cx_data[4] = v >> 8;
1829
1830 v = (pc->vert_scale << 14) | pc->Height;
1831 cx_data[5] = v;
1832 cx_data[6] = v >> 8;
1833
1834 extra_bytes_packed = 7;
1835 cx_data += extra_bytes_packed;
1836
1837 vp9_start_encode(&header_bc, cx_data);
1838
1839 // signal clr type
1840 vp9_write_bit(&header_bc, pc->clr_type);
1841 vp9_write_bit(&header_bc, pc->clamp_type);
1842
1843 } else {
1844 vp9_start_encode(&header_bc, cx_data);
1845 }
1846
1847 // Signal whether or not Segmentation is enabled
1848 vp9_write_bit(&header_bc, (xd->segmentation_enabled) ? 1 : 0);
1849
1850 // Indicate which features are enabled
1851 if (xd->segmentation_enabled) {
1852 // Indicate whether or not the segmentation map is being updated.
1853 vp9_write_bit(&header_bc, (xd->update_mb_segmentation_map) ? 1 : 0);
1854
1855 // If it is, then indicate the method that will be used.
1856 if (xd->update_mb_segmentation_map) {
1857 // Select the coding strategy (temporal or spatial)
1858 vp9_choose_segmap_coding_method(cpi);
1859 // Send the tree probabilities used to decode unpredicted
1860 // macro-block segments
1861 for (i = 0; i < MB_FEATURE_TREE_PROBS; i++) {
1862 int data = xd->mb_segment_tree_probs[i];
1863
1864 if (data != 255) {
1865 vp9_write_bit(&header_bc, 1);
1866 vp9_write_literal(&header_bc, data, 8);
1867 } else {
1868 vp9_write_bit(&header_bc, 0);
1869 }
1870 }
1871
1872 // Write out the chosen coding method.
1873 vp9_write_bit(&header_bc, (pc->temporal_update) ? 1 : 0);
1874 if (pc->temporal_update) {
1875 for (i = 0; i < PREDICTION_PROBS; i++) {
1876 int data = pc->segment_pred_probs[i];
1877
1878 if (data != 255) {
1879 vp9_write_bit(&header_bc, 1);
1880 vp9_write_literal(&header_bc, data, 8);
1881 } else {
1882 vp9_write_bit(&header_bc, 0);
1883 }
1884 }
1885 }
1886 }
1887
1888 vp9_write_bit(&header_bc, (xd->update_mb_segmentation_data) ? 1 : 0);
1889
1890 // segment_reference_frames(cpi);
1891
1892 if (xd->update_mb_segmentation_data) {
1893 signed char Data;
1894
1895 vp9_write_bit(&header_bc, (xd->mb_segment_abs_delta) ? 1 : 0);
1896
1897 // For each segments id...
1898 for (i = 0; i < MAX_MB_SEGMENTS; i++) {
1899 // For each segmentation codable feature...
1900 for (j = 0; j < SEG_LVL_MAX; j++) {
1901 Data = vp9_get_segdata(xd, i, j);
1902
1903 // If the feature is enabled...
1904 if (vp9_segfeature_active(xd, i, j)) {
1905 vp9_write_bit(&header_bc, 1);
1906
1907 // Is the segment data signed..
1908 if (vp9_is_segfeature_signed(j)) {
1909 // Encode the relevant feature data
1910 if (Data < 0) {
1911 Data = - Data;
1912 vp9_encode_unsigned_max(&header_bc, Data,
1913 vp9_seg_feature_data_max(j));
1914 vp9_write_bit(&header_bc, 1);
1915 } else {
1916 vp9_encode_unsigned_max(&header_bc, Data,
1917 vp9_seg_feature_data_max(j));
1918 vp9_write_bit(&header_bc, 0);
1919 }
1920 }
1921 // Unsigned data element so no sign bit needed
1922 else
1923 vp9_encode_unsigned_max(&header_bc, Data,
1924 vp9_seg_feature_data_max(j));
1925 } else
1926 vp9_write_bit(&header_bc, 0);
1927 }
1928 }
1929 }
1930 }
1931
1932 // Encode the common prediction model status flag probability updates for
1933 // the reference frame
1934 update_refpred_stats(cpi);
1935 if (pc->frame_type != KEY_FRAME) {
1936 for (i = 0; i < PREDICTION_PROBS; i++) {
1937 if (cpi->ref_pred_probs_update[i]) {
1938 vp9_write_bit(&header_bc, 1);
1939 vp9_write_literal(&header_bc, pc->ref_pred_probs[i], 8);
1940 } else {
1941 vp9_write_bit(&header_bc, 0);
1942 }
1943 }
1944 }
1945
1946 #if CONFIG_SUPERBLOCKS
1947 {
1948 /* sb mode probability */
1949 const int sb_max = (((pc->mb_rows + 1) >> 1) * ((pc->mb_cols + 1) >> 1));
1950
1951 pc->sb_coded = get_prob(sb_max - cpi->sb_count, sb_max);
1952 vp9_write_literal(&header_bc, pc->sb_coded, 8);
1953 }
1954 #endif
1955
1956 {
1957 if (pc->txfm_mode == TX_MODE_SELECT) {
1958 pc->prob_tx[0] = get_prob(cpi->txfm_count[0] + cpi->txfm_count_8x8p[0],
1959 cpi->txfm_count[0] + cpi->txfm_count[1] + cpi->t xfm_count[2] +
1960 cpi->txfm_count_8x8p[0] + cpi->txfm_count_8x8p[1 ]);
1961 pc->prob_tx[1] = get_prob(cpi->txfm_count[1], cpi->txfm_count[1] + cpi->tx fm_count[2]);
1962 } else {
1963 pc->prob_tx[0] = 128;
1964 pc->prob_tx[1] = 128;
1965 }
1966 vp9_write_literal(&header_bc, pc->txfm_mode, 2);
1967 if (pc->txfm_mode == TX_MODE_SELECT) {
1968 vp9_write_literal(&header_bc, pc->prob_tx[0], 8);
1969 vp9_write_literal(&header_bc, pc->prob_tx[1], 8);
1970 }
1971 }
1972
1973 // Encode the loop filter level and type
1974 vp9_write_bit(&header_bc, pc->filter_type);
1975 vp9_write_literal(&header_bc, pc->filter_level, 6);
1976 vp9_write_literal(&header_bc, pc->sharpness_level, 3);
1977
1978 // Write out loop filter deltas applied at the MB level based on mode or ref f rame (if they are enabled).
1979 vp9_write_bit(&header_bc, (xd->mode_ref_lf_delta_enabled) ? 1 : 0);
1980
1981 if (xd->mode_ref_lf_delta_enabled) {
1982 // Do the deltas need to be updated
1983 int send_update = xd->mode_ref_lf_delta_update;
1984
1985 vp9_write_bit(&header_bc, send_update);
1986 if (send_update) {
1987 int Data;
1988
1989 // Send update
1990 for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
1991 Data = xd->ref_lf_deltas[i];
1992
1993 // Frame level data
1994 if (xd->ref_lf_deltas[i] != xd->last_ref_lf_deltas[i]) {
1995 xd->last_ref_lf_deltas[i] = xd->ref_lf_deltas[i];
1996 vp9_write_bit(&header_bc, 1);
1997
1998 if (Data > 0) {
1999 vp9_write_literal(&header_bc, (Data & 0x3F), 6);
2000 vp9_write_bit(&header_bc, 0); // sign
2001 } else {
2002 Data = -Data;
2003 vp9_write_literal(&header_bc, (Data & 0x3F), 6);
2004 vp9_write_bit(&header_bc, 1); // sign
2005 }
2006 } else {
2007 vp9_write_bit(&header_bc, 0);
2008 }
2009 }
2010
2011 // Send update
2012 for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
2013 Data = xd->mode_lf_deltas[i];
2014
2015 if (xd->mode_lf_deltas[i] != xd->last_mode_lf_deltas[i]) {
2016 xd->last_mode_lf_deltas[i] = xd->mode_lf_deltas[i];
2017 vp9_write_bit(&header_bc, 1);
2018
2019 if (Data > 0) {
2020 vp9_write_literal(&header_bc, (Data & 0x3F), 6);
2021 vp9_write_bit(&header_bc, 0); // sign
2022 } else {
2023 Data = -Data;
2024 vp9_write_literal(&header_bc, (Data & 0x3F), 6);
2025 vp9_write_bit(&header_bc, 1); // sign
2026 }
2027 } else {
2028 vp9_write_bit(&header_bc, 0);
2029 }
2030 }
2031 }
2032 }
2033
2034 // signal here is multi token partition is enabled
2035 // vp9_write_literal(&header_bc, pc->multi_token_partition, 2);
2036 vp9_write_literal(&header_bc, 0, 2);
2037
2038 // Frame Q baseline quantizer index
2039 vp9_write_literal(&header_bc, pc->base_qindex, QINDEX_BITS);
2040
2041 // Transmit Dc, Second order and Uv quantizer delta information
2042 put_delta_q(&header_bc, pc->y1dc_delta_q);
2043 put_delta_q(&header_bc, pc->y2dc_delta_q);
2044 put_delta_q(&header_bc, pc->y2ac_delta_q);
2045 put_delta_q(&header_bc, pc->uvdc_delta_q);
2046 put_delta_q(&header_bc, pc->uvac_delta_q);
2047
2048 // When there is a key frame all reference buffers are updated using the new k ey frame
2049 if (pc->frame_type != KEY_FRAME) {
2050 // Should the GF or ARF be updated using the transmitted frame or buffer
2051 vp9_write_bit(&header_bc, pc->refresh_golden_frame);
2052 vp9_write_bit(&header_bc, pc->refresh_alt_ref_frame);
2053
2054 // For inter frames the current default behavior is that when
2055 // cm->refresh_golden_frame is set we copy the old GF over to
2056 // the ARF buffer. This is purely an encoder decision at present.
2057 if (pc->refresh_golden_frame)
2058 pc->copy_buffer_to_arf = 2;
2059
2060 // If not being updated from current frame should either GF or ARF be update d from another buffer
2061 if (!pc->refresh_golden_frame)
2062 vp9_write_literal(&header_bc, pc->copy_buffer_to_gf, 2);
2063
2064 if (!pc->refresh_alt_ref_frame)
2065 vp9_write_literal(&header_bc, pc->copy_buffer_to_arf, 2);
2066
2067 // Indicate reference frame sign bias for Golden and ARF frames (always 0 fo r last frame buffer)
2068 vp9_write_bit(&header_bc, pc->ref_frame_sign_bias[GOLDEN_FRAME]);
2069 vp9_write_bit(&header_bc, pc->ref_frame_sign_bias[ALTREF_FRAME]);
2070
2071 // Signal whether to allow high MV precision
2072 vp9_write_bit(&header_bc, (xd->allow_high_precision_mv) ? 1 : 0);
2073 if (pc->mcomp_filter_type == SWITCHABLE) {
2074 /* Check to see if only one of the filters is actually used */
2075 int count[VP9_SWITCHABLE_FILTERS];
2076 int i, j, c = 0;
2077 for (i = 0; i < VP9_SWITCHABLE_FILTERS; ++i) {
2078 count[i] = 0;
2079 for (j = 0; j <= VP9_SWITCHABLE_FILTERS; ++j) {
2080 count[i] += cpi->switchable_interp_count[j][i];
2081 }
2082 c += (count[i] > 0);
2083 }
2084 if (c == 1) {
2085 /* Only one filter is used. So set the filter at frame level */
2086 for (i = 0; i < VP9_SWITCHABLE_FILTERS; ++i) {
2087 if (count[i]) {
2088 pc->mcomp_filter_type = vp9_switchable_interp[i];
2089 break;
2090 }
2091 }
2092 }
2093 }
2094 // Signal the type of subpel filter to use
2095 vp9_write_bit(&header_bc, (pc->mcomp_filter_type == SWITCHABLE));
2096 if (pc->mcomp_filter_type != SWITCHABLE)
2097 vp9_write_literal(&header_bc, (pc->mcomp_filter_type), 2);
2098 #if CONFIG_COMP_INTERINTRA_PRED
2099 // printf("Counts: %d %d\n", cpi->interintra_count[0],
2100 // cpi->interintra_count[1]);
2101 if (!cpi->dummy_packing && pc->use_interintra)
2102 pc->use_interintra = (cpi->interintra_count[1] > 0);
2103 vp9_write_bit(&header_bc, pc->use_interintra);
2104 if (!pc->use_interintra)
2105 vp9_zero(cpi->interintra_count);
2106 #endif
2107 }
2108
2109 vp9_write_bit(&header_bc, pc->refresh_entropy_probs);
2110
2111 if (pc->frame_type != KEY_FRAME)
2112 vp9_write_bit(&header_bc, pc->refresh_last_frame);
2113
2114 #ifdef ENTROPY_STATS
2115 if (pc->frame_type == INTER_FRAME)
2116 active_section = 0;
2117 else
2118 active_section = 7;
2119 #endif
2120
2121 // If appropriate update the inter mode probability context and code the
2122 // changes in the bitstream.
2123 if ((pc->frame_type != KEY_FRAME)) {
2124 int i, j;
2125 int new_context[INTER_MODE_CONTEXTS][4];
2126 update_mode_probs(pc, new_context);
2127
2128 for (i = 0; i < INTER_MODE_CONTEXTS; i++) {
2129 for (j = 0; j < 4; j++) {
2130 if (new_context[i][j] != pc->fc.vp9_mode_contexts[i][j]) {
2131 vp9_write(&header_bc, 1, 252);
2132 vp9_write_literal(&header_bc, new_context[i][j], 8);
2133
2134 // Only update the persistent copy if this is the "real pack"
2135 if (!cpi->dummy_packing) {
2136 pc->fc.vp9_mode_contexts[i][j] = new_context[i][j];
2137 }
2138 } else {
2139 vp9_write(&header_bc, 0, 252);
2140 }
2141 }
2142 }
2143 }
2144
2145 vp9_clear_system_state(); // __asm emms;
2146
2147 vp9_copy(cpi->common.fc.pre_coef_probs, cpi->common.fc.coef_probs);
2148 vp9_copy(cpi->common.fc.pre_hybrid_coef_probs, cpi->common.fc.hybrid_coef_prob s);
2149 vp9_copy(cpi->common.fc.pre_coef_probs_8x8, cpi->common.fc.coef_probs_8x8);
2150 vp9_copy(cpi->common.fc.pre_hybrid_coef_probs_8x8, cpi->common.fc.hybrid_coef_ probs_8x8);
2151 vp9_copy(cpi->common.fc.pre_coef_probs_16x16, cpi->common.fc.coef_probs_16x16) ;
2152 vp9_copy(cpi->common.fc.pre_hybrid_coef_probs_16x16, cpi->common.fc.hybrid_coe f_probs_16x16);
2153 #if CONFIG_SUPERBLOCKS
2154 vp9_copy(cpi->common.fc.pre_sb_ymode_prob, cpi->common.fc.sb_ymode_prob);
2155 #endif
2156 vp9_copy(cpi->common.fc.pre_ymode_prob, cpi->common.fc.ymode_prob);
2157 vp9_copy(cpi->common.fc.pre_uv_mode_prob, cpi->common.fc.uv_mode_prob);
2158 vp9_copy(cpi->common.fc.pre_bmode_prob, cpi->common.fc.bmode_prob);
2159 vp9_copy(cpi->common.fc.pre_sub_mv_ref_prob, cpi->common.fc.sub_mv_ref_prob);
2160 vp9_copy(cpi->common.fc.pre_mbsplit_prob, cpi->common.fc.mbsplit_prob);
2161 vp9_copy(cpi->common.fc.pre_i8x8_mode_prob, cpi->common.fc.i8x8_mode_prob);
2162 cpi->common.fc.pre_nmvc = cpi->common.fc.nmvc;
2163 #if CONFIG_COMP_INTERINTRA_PRED
2164 cpi->common.fc.pre_interintra_prob = cpi->common.fc.interintra_prob;
2165 #endif
2166 vp9_zero(cpi->sub_mv_ref_count);
2167 vp9_zero(cpi->mbsplit_count);
2168 vp9_zero(cpi->common.fc.mv_ref_ct)
2169
2170 update_coef_probs(cpi, &header_bc);
2171
2172 #ifdef ENTROPY_STATS
2173 active_section = 2;
2174 #endif
2175
2176 // Write out the mb_no_coeff_skip flag
2177 vp9_write_bit(&header_bc, pc->mb_no_coeff_skip);
2178 if (pc->mb_no_coeff_skip) {
2179 int k;
2180
2181 vp9_update_skip_probs(cpi);
2182 for (k = 0; k < MBSKIP_CONTEXTS; ++k)
2183 vp9_write_literal(&header_bc, pc->mbskip_pred_probs[k], 8);
2184 }
2185
2186 if (pc->frame_type == KEY_FRAME) {
2187 if (!pc->kf_ymode_probs_update) {
2188 vp9_write_literal(&header_bc, pc->kf_ymode_probs_index, 3);
2189 }
2190 } else {
2191 // Update the probabilities used to encode reference frame data
2192 update_ref_probs(cpi);
2193
2194 #ifdef ENTROPY_STATS
2195 active_section = 1;
2196 #endif
2197
2198 #if CONFIG_PRED_FILTER
2199 // Write the prediction filter mode used for this frame
2200 vp9_write_literal(&header_bc, pc->pred_filter_mode, 2);
2201
2202 // Write prediction filter on/off probability if signaling at MB level
2203 if (pc->pred_filter_mode == 2)
2204 vp9_write_literal(&header_bc, pc->prob_pred_filter_off, 8);
2205
2206 #endif
2207 if (pc->mcomp_filter_type == SWITCHABLE)
2208 update_switchable_interp_probs(cpi, &header_bc);
2209
2210 #if CONFIG_COMP_INTERINTRA_PRED
2211 if (pc->use_interintra) {
2212 vp9_cond_prob_update(&header_bc,
2213 &pc->fc.interintra_prob,
2214 VP9_UPD_INTERINTRA_PROB,
2215 cpi->interintra_count);
2216 }
2217 #endif
2218
2219 vp9_write_literal(&header_bc, pc->prob_intra_coded, 8);
2220 vp9_write_literal(&header_bc, pc->prob_last_coded, 8);
2221 vp9_write_literal(&header_bc, pc->prob_gf_coded, 8);
2222
2223 {
2224 const int comp_pred_mode = cpi->common.comp_pred_mode;
2225 const int use_compound_pred = (comp_pred_mode != SINGLE_PREDICTION_ONLY);
2226 const int use_hybrid_pred = (comp_pred_mode == HYBRID_PREDICTION);
2227
2228 vp9_write(&header_bc, use_compound_pred, 128);
2229 if (use_compound_pred) {
2230 vp9_write(&header_bc, use_hybrid_pred, 128);
2231 if (use_hybrid_pred) {
2232 for (i = 0; i < COMP_PRED_CONTEXTS; i++) {
2233 pc->prob_comppred[i] = get_binary_prob(cpi->single_pred_count[i],
2234 cpi->comp_pred_count[i]);
2235 vp9_write_literal(&header_bc, pc->prob_comppred[i], 8);
2236 }
2237 }
2238 }
2239 }
2240 update_mbintra_mode_probs(cpi, &header_bc);
2241
2242 vp9_write_nmv_probs(cpi, xd->allow_high_precision_mv, &header_bc);
2243 }
2244
2245 vp9_stop_encode(&header_bc);
2246
2247 oh.first_partition_length_in_bytes = header_bc.pos;
2248
2249 /* update frame tag */
2250 {
2251 int v = (oh.first_partition_length_in_bytes << 5) |
2252 (oh.show_frame << 4) |
2253 (oh.version << 1) |
2254 oh.type;
2255
2256 dest[0] = v;
2257 dest[1] = v >> 8;
2258 dest[2] = v >> 16;
2259 }
2260
2261 *size = VP9_HEADER_SIZE + extra_bytes_packed + header_bc.pos;
2262 vp9_start_encode(&residual_bc, cx_data + header_bc.pos);
2263
2264 if (pc->frame_type == KEY_FRAME) {
2265 decide_kf_ymode_entropy(cpi);
2266 write_kfmodes(cpi, &residual_bc);
2267 } else {
2268 /* This is not required if the counts in cpi are consistent with the
2269 * final packing pass */
2270 // if (!cpi->dummy_packing) vp9_zero(cpi->NMVcount);
2271 pack_inter_mode_mvs(cpi, &residual_bc);
2272
2273 vp9_update_mode_context(&cpi->common);
2274 }
2275
2276 vp9_stop_encode(&residual_bc);
2277
2278 *size += residual_bc.pos;
2279 }
2280
#ifdef ENTROPY_STATS
// Dump the accumulated coefficient tree-update histograms as a generated
// C header ("coefupdprob.h"), turning each [0]/[1] branch-count pair into
// a binary probability, and also dump the raw histogram arrays to
// "treeupdate.bin".  Debug/statistics tooling only (ENTROPY_STATS builds).
void print_tree_update_probs() {
  int i, j, k, l;
  FILE *f = fopen("coefupdprob.h", "w");

  if (!f)
    return;  // Cannot write the dump; bail out instead of crashing in fprintf.

  fprintf(f, "\n/* Update probabilities for token entropy tree. */\n\n");

  fprintf(f, "const vp9_prob\n"
          "vp9_coef_update_probs[BLOCK_TYPES]\n"
          "                     [COEF_BANDS]\n"
          "                     [PREV_COEF_CONTEXTS]\n"
          "                     [ENTROPY_NODES] = {\n");
  for (i = 0; i < BLOCK_TYPES; i++) {
    fprintf(f, "  { \n");
    for (j = 0; j < COEF_BANDS; j++) {
      fprintf(f, "    {\n");
      for (k = 0; k < PREV_COEF_CONTEXTS; k++) {
        fprintf(f, "      {");
        for (l = 0; l < ENTROPY_NODES; l++) {
          // get_binary_prob() is passed as a default-promoted int; print it
          // with a matching "%d" conversion.  The previous "%ld" mismatched
          // the argument type, which is undefined behavior.
          fprintf(f, "%3d, ",
                  (int)get_binary_prob(tree_update_hist[i][j][k][l][0],
                                       tree_update_hist[i][j][k][l][1]));
        }
        fprintf(f, "},\n");
      }
      fprintf(f, "    },\n");
    }
    fprintf(f, "  },\n");
  }
  fprintf(f, "};\n");

  fprintf(f, "const vp9_prob\n"
          "vp9_coef_update_probs_8x8[BLOCK_TYPES_8X8]\n"
          "                         [COEF_BANDS]\n"
          "                         [PREV_COEF_CONTEXTS]\n"
          "                         [ENTROPY_NODES] = {\n");
  for (i = 0; i < BLOCK_TYPES_8X8; i++) {
    fprintf(f, "  { \n");
    for (j = 0; j < COEF_BANDS; j++) {
      fprintf(f, "    {\n");
      for (k = 0; k < PREV_COEF_CONTEXTS; k++) {
        fprintf(f, "      {");
        // NOTE(review): loop bound kept as MAX_ENTROPY_TOKENS - 1 (as in the
        // original) although the printed declaration says ENTROPY_NODES;
        // these are presumably equal — confirm against vp9_entropy.h.
        for (l = 0; l < MAX_ENTROPY_TOKENS - 1; l++) {
          fprintf(f, "%3d, ",
                  (int)get_binary_prob(tree_update_hist_8x8[i][j][k][l][0],
                                       tree_update_hist_8x8[i][j][k][l][1]));
        }
        fprintf(f, "},\n");
      }
      fprintf(f, "    },\n");
    }
    fprintf(f, "  },\n");
  }
  // Close the 8x8 table initializer (this terminator was missing, leaving
  // the generated header syntactically invalid).
  fprintf(f, "};\n");

  fprintf(f, "const vp9_prob\n"
          "vp9_coef_update_probs_16x16[BLOCK_TYPES_16X16]\n"
          "                           [COEF_BANDS]\n"
          "                           [PREV_COEF_CONTEXTS]\n"
          "                           [ENTROPY_NODES] = {\n");
  for (i = 0; i < BLOCK_TYPES_16X16; i++) {
    fprintf(f, "  { \n");
    for (j = 0; j < COEF_BANDS; j++) {
      fprintf(f, "    {\n");
      for (k = 0; k < PREV_COEF_CONTEXTS; k++) {
        fprintf(f, "      {");
        for (l = 0; l < MAX_ENTROPY_TOKENS - 1; l++) {
          fprintf(f, "%3d, ",
                  (int)get_binary_prob(tree_update_hist_16x16[i][j][k][l][0],
                                       tree_update_hist_16x16[i][j][k][l][1]));
        }
        fprintf(f, "},\n");
      }
      fprintf(f, "    },\n");
    }
    fprintf(f, "  },\n");
  }
  // Close the 16x16 table initializer (this terminator was also missing).
  fprintf(f, "};\n");

  fclose(f);

  // Raw counts, for offline analysis.
  f = fopen("treeupdate.bin", "wb");
  if (!f)
    return;
  fwrite(tree_update_hist, sizeof(tree_update_hist), 1, f);
  fwrite(tree_update_hist_8x8, sizeof(tree_update_hist_8x8), 1, f);
  fwrite(tree_update_hist_16x16, sizeof(tree_update_hist_16x16), 1, f);
  fclose(f);
}
#endif
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698