Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(47)

Side by Side Diff: source/libvpx/vp9/encoder/vp9_bitstream.c

Issue 11974002: libvpx: Pull from upstream (Closed) Base URL: svn://chrome-svn/chrome/trunk/deps/third_party/libvpx/
Patch Set: Created 7 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « source/libvpx/vp9/encoder/vp9_bitstream.h ('k') | source/libvpx/vp9/encoder/vp9_block.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 /* 1 /*
2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved. 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 11
12 #include "vp9/common/vp9_header.h" 12 #include "vp9/common/vp9_header.h"
13 #include "vp9/encoder/vp9_encodemv.h" 13 #include "vp9/encoder/vp9_encodemv.h"
14 #include "vp9/common/vp9_entropymode.h" 14 #include "vp9/common/vp9_entropymode.h"
15 #include "vp9/common/vp9_entropymv.h"
15 #include "vp9/common/vp9_findnearmv.h" 16 #include "vp9/common/vp9_findnearmv.h"
16 #include "vp9/encoder/vp9_mcomp.h" 17 #include "vp9/encoder/vp9_mcomp.h"
17 #include "vp9/common/vp9_systemdependent.h" 18 #include "vp9/common/vp9_systemdependent.h"
18 #include <assert.h> 19 #include <assert.h>
19 #include <stdio.h> 20 #include <stdio.h>
20 #include <limits.h> 21 #include <limits.h>
21 #include "vp9/common/vp9_pragmas.h" 22 #include "vp9/common/vp9_pragmas.h"
22 #include "vpx/vpx_encoder.h" 23 #include "vpx/vpx_encoder.h"
23 #include "vpx_mem/vpx_mem.h" 24 #include "vpx_mem/vpx_mem.h"
24 #include "vp9/encoder/vp9_bitstream.h" 25 #include "vp9/encoder/vp9_bitstream.h"
25 #include "vp9/encoder/vp9_segmentation.h" 26 #include "vp9/encoder/vp9_segmentation.h"
26 27
27 #include "vp9/common/vp9_seg_common.h" 28 #include "vp9/common/vp9_seg_common.h"
28 #include "vp9/common/vp9_pred_common.h" 29 #include "vp9/common/vp9_pred_common.h"
29 #include "vp9/common/vp9_entropy.h" 30 #include "vp9/common/vp9_entropy.h"
30 #include "vp9/encoder/vp9_encodemv.h" 31 #include "vp9/encoder/vp9_encodemv.h"
31 #include "vp9/common/vp9_entropymv.h" 32 #include "vp9/common/vp9_entropymv.h"
32 #include "vp9/common/vp9_mvref_common.h" 33 #include "vp9/common/vp9_mvref_common.h"
34 #include "vp9/common/vp9_treecoder.h"
33 35
34 #if defined(SECTIONBITS_OUTPUT) 36 #if defined(SECTIONBITS_OUTPUT)
35 unsigned __int64 Sectionbits[500]; 37 unsigned __int64 Sectionbits[500];
36 #endif 38 #endif
37 39
38 #ifdef ENTROPY_STATS 40 #ifdef ENTROPY_STATS
39 int intra_mode_stats[VP9_KF_BINTRAMODES] 41 int intra_mode_stats[VP9_KF_BINTRAMODES]
40 [VP9_KF_BINTRAMODES] 42 [VP9_KF_BINTRAMODES]
41 [VP9_KF_BINTRAMODES]; 43 [VP9_KF_BINTRAMODES];
42 unsigned int tree_update_hist [BLOCK_TYPES] 44 vp9_coeff_stats tree_update_hist_4x4[BLOCK_TYPES_4X4];
43 [COEF_BANDS] 45 vp9_coeff_stats hybrid_tree_update_hist_4x4[BLOCK_TYPES_4X4];
44 [PREV_COEF_CONTEXTS] 46 vp9_coeff_stats tree_update_hist_8x8[BLOCK_TYPES_8X8];
45 [ENTROPY_NODES][2]; 47 vp9_coeff_stats hybrid_tree_update_hist_8x8[BLOCK_TYPES_8X8];
46 unsigned int hybrid_tree_update_hist [BLOCK_TYPES] 48 vp9_coeff_stats tree_update_hist_16x16[BLOCK_TYPES_16X16];
47 [COEF_BANDS] 49 vp9_coeff_stats hybrid_tree_update_hist_16x16[BLOCK_TYPES_16X16];
48 [PREV_COEF_CONTEXTS] 50 vp9_coeff_stats tree_update_hist_32x32[BLOCK_TYPES_32X32];
49 [ENTROPY_NODES][2];
50 unsigned int tree_update_hist_8x8 [BLOCK_TYPES_8X8]
51 [COEF_BANDS]
52 [PREV_COEF_CONTEXTS]
53 [ENTROPY_NODES] [2];
54 unsigned int hybrid_tree_update_hist_8x8 [BLOCK_TYPES_8X8]
55 [COEF_BANDS]
56 [PREV_COEF_CONTEXTS]
57 [ENTROPY_NODES] [2];
58 unsigned int tree_update_hist_16x16 [BLOCK_TYPES_16X16]
59 [COEF_BANDS]
60 [PREV_COEF_CONTEXTS]
61 [ENTROPY_NODES] [2];
62 unsigned int hybrid_tree_update_hist_16x16 [BLOCK_TYPES_16X16]
63 [COEF_BANDS]
64 [PREV_COEF_CONTEXTS]
65 [ENTROPY_NODES] [2];
66 51
67 extern unsigned int active_section; 52 extern unsigned int active_section;
68 #endif 53 #endif
69 54
70 #ifdef MODE_STATS 55 #ifdef MODE_STATS
71 int count_mb_seg[4] = { 0, 0, 0, 0 }; 56 int count_mb_seg[4] = { 0, 0, 0, 0 };
72 #endif 57 #endif
73 58
74 #define vp9_cost_upd ((int)(vp9_cost_one(upd) - vp9_cost_zero(upd)) >> 8) 59 #define vp9_cost_upd ((int)(vp9_cost_one(upd) - vp9_cost_zero(upd)) >> 8)
75 #define vp9_cost_upd256 ((int)(vp9_cost_one(upd) - vp9_cost_zero(upd))) 60 #define vp9_cost_upd256 ((int)(vp9_cost_one(upd) - vp9_cost_zero(upd)))
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
120 vp9_token tok [/* n */], 105 vp9_token tok [/* n */],
121 vp9_tree tree, 106 vp9_tree tree,
122 vp9_prob Pnew [/* n-1 */], 107 vp9_prob Pnew [/* n-1 */],
123 vp9_prob Pcur [/* n-1 */], 108 vp9_prob Pcur [/* n-1 */],
124 unsigned int bct [/* n-1 */] [2], 109 unsigned int bct [/* n-1 */] [2],
125 const unsigned int num_events[/* n */] 110 const unsigned int num_events[/* n */]
126 ) { 111 ) {
127 unsigned int new_b = 0, old_b = 0; 112 unsigned int new_b = 0, old_b = 0;
128 int i = 0; 113 int i = 0;
129 114
130 vp9_tree_probs_from_distribution( 115 vp9_tree_probs_from_distribution(n--, tok, tree,
131 n--, tok, tree, 116 Pnew, bct, num_events);
132 Pnew, bct, num_events,
133 256, 1
134 );
135 117
136 do { 118 do {
137 new_b += cost_branch(bct[i], Pnew[i]); 119 new_b += cost_branch(bct[i], Pnew[i]);
138 old_b += cost_branch(bct[i], Pcur[i]); 120 old_b += cost_branch(bct[i], Pcur[i]);
139 } while (++i < n); 121 } while (++i < n);
140 122
141 if (new_b + (n << 8) < old_b) { 123 if (new_b + (n << 8) < old_b) {
142 int i = 0; 124 int i = 0;
143 125
144 vp9_write_bit(bc, 1); 126 vp9_write_bit(bc, 1);
(...skipping 12 matching lines...) Expand all
157 VP9_COMMON *const cm = &cpi->common; 139 VP9_COMMON *const cm = &cpi->common;
158 140
159 { 141 {
160 vp9_prob Pnew [VP9_YMODES - 1]; 142 vp9_prob Pnew [VP9_YMODES - 1];
161 unsigned int bct [VP9_YMODES - 1] [2]; 143 unsigned int bct [VP9_YMODES - 1] [2];
162 144
163 update_mode( 145 update_mode(
164 bc, VP9_YMODES, vp9_ymode_encodings, vp9_ymode_tree, 146 bc, VP9_YMODES, vp9_ymode_encodings, vp9_ymode_tree,
165 Pnew, cm->fc.ymode_prob, bct, (unsigned int *)cpi->ymode_count 147 Pnew, cm->fc.ymode_prob, bct, (unsigned int *)cpi->ymode_count
166 ); 148 );
167 #if CONFIG_SUPERBLOCKS
168 update_mode(bc, VP9_I32X32_MODES, vp9_sb_ymode_encodings, 149 update_mode(bc, VP9_I32X32_MODES, vp9_sb_ymode_encodings,
169 vp9_sb_ymode_tree, Pnew, cm->fc.sb_ymode_prob, bct, 150 vp9_sb_ymode_tree, Pnew, cm->fc.sb_ymode_prob, bct,
170 (unsigned int *)cpi->sb_ymode_count); 151 (unsigned int *)cpi->sb_ymode_count);
171 #endif
172 } 152 }
173 } 153 }
174 154
175 static int get_prob(int num, int den) {
176 int p;
177 if (den <= 0)
178 return 128;
179 p = (num * 255 + (den >> 1)) / den;
180 return clip_prob(p);
181 }
182
183 static int get_binary_prob(int n0, int n1) {
184 return get_prob(n0, n0 + n1);
185 }
186
187 void vp9_update_skip_probs(VP9_COMP *cpi) { 155 void vp9_update_skip_probs(VP9_COMP *cpi) {
188 VP9_COMMON *const pc = &cpi->common; 156 VP9_COMMON *const pc = &cpi->common;
189 int k; 157 int k;
190 158
191 for (k = 0; k < MBSKIP_CONTEXTS; ++k) { 159 for (k = 0; k < MBSKIP_CONTEXTS; ++k) {
192 pc->mbskip_pred_probs[k] = get_binary_prob(cpi->skip_false_count[k], 160 pc->mbskip_pred_probs[k] = get_binary_prob(cpi->skip_false_count[k],
193 cpi->skip_true_count[k]); 161 cpi->skip_true_count[k]);
194 } 162 }
195 } 163 }
196 164
197 static void update_switchable_interp_probs(VP9_COMP *cpi, 165 static void update_switchable_interp_probs(VP9_COMP *cpi,
198 vp9_writer* const bc) { 166 vp9_writer* const bc) {
199 VP9_COMMON *const pc = &cpi->common; 167 VP9_COMMON *const pc = &cpi->common;
200 unsigned int branch_ct[32][2]; 168 unsigned int branch_ct[32][2];
201 int i, j; 169 int i, j;
202 for (j = 0; j <= VP9_SWITCHABLE_FILTERS; ++j) { 170 for (j = 0; j <= VP9_SWITCHABLE_FILTERS; ++j) {
203 vp9_tree_probs_from_distribution( 171 vp9_tree_probs_from_distribution(
204 VP9_SWITCHABLE_FILTERS, 172 VP9_SWITCHABLE_FILTERS,
205 vp9_switchable_interp_encodings, vp9_switchable_interp_tree, 173 vp9_switchable_interp_encodings, vp9_switchable_interp_tree,
206 pc->fc.switchable_interp_prob[j], branch_ct, 174 pc->fc.switchable_interp_prob[j], branch_ct,
207 cpi->switchable_interp_count[j], 256, 1); 175 cpi->switchable_interp_count[j]);
208 for (i = 0; i < VP9_SWITCHABLE_FILTERS - 1; ++i) { 176 for (i = 0; i < VP9_SWITCHABLE_FILTERS - 1; ++i) {
209 if (pc->fc.switchable_interp_prob[j][i] < 1) 177 if (pc->fc.switchable_interp_prob[j][i] < 1)
210 pc->fc.switchable_interp_prob[j][i] = 1; 178 pc->fc.switchable_interp_prob[j][i] = 1;
211 vp9_write_literal(bc, pc->fc.switchable_interp_prob[j][i], 8); 179 vp9_write_literal(bc, pc->fc.switchable_interp_prob[j][i], 8);
212 } 180 }
213 } 181 }
214 } 182 }
215 183
216 // This function updates the reference frame prediction stats 184 // This function updates the reference frame prediction stats
217 static void update_refpred_stats(VP9_COMP *cpi) { 185 static void update_refpred_stats(VP9_COMP *cpi) {
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
267 int i, j; 235 int i, j;
268 unsigned int (*mv_ref_ct)[4][2]; 236 unsigned int (*mv_ref_ct)[4][2];
269 237
270 vpx_memcpy(mode_context, cm->fc.vp9_mode_contexts, 238 vpx_memcpy(mode_context, cm->fc.vp9_mode_contexts,
271 sizeof(cm->fc.vp9_mode_contexts)); 239 sizeof(cm->fc.vp9_mode_contexts));
272 240
273 mv_ref_ct = cm->fc.mv_ref_ct; 241 mv_ref_ct = cm->fc.mv_ref_ct;
274 242
275 for (i = 0; i < INTER_MODE_CONTEXTS; i++) { 243 for (i = 0; i < INTER_MODE_CONTEXTS; i++) {
276 for (j = 0; j < 4; j++) { 244 for (j = 0; j < 4; j++) {
277 int new_prob, count, old_cost, new_cost; 245 int new_prob, old_cost, new_cost;
278 246
279 // Work out cost of coding branches with the old and optimal probability 247 // Work out cost of coding branches with the old and optimal probability
280 old_cost = cost_branch256(mv_ref_ct[i][j], mode_context[i][j]); 248 old_cost = cost_branch256(mv_ref_ct[i][j], mode_context[i][j]);
281 count = mv_ref_ct[i][j][0] + mv_ref_ct[i][j][1]; 249 new_prob = get_binary_prob(mv_ref_ct[i][j][0], mv_ref_ct[i][j][1]);
282 new_prob = count > 0 ? (255 * mv_ref_ct[i][j][0]) / count : 128;
283 new_prob = (new_prob > 0) ? new_prob : 1;
284 new_cost = cost_branch256(mv_ref_ct[i][j], new_prob); 250 new_cost = cost_branch256(mv_ref_ct[i][j], new_prob);
285 251
286 // If cost saving is >= 14 bits then update the mode probability. 252 // If cost saving is >= 14 bits then update the mode probability.
287 // This is the approximate net cost of updating one probability given 253 // This is the approximate net cost of updating one probability given
288       // that the no update case is much more common than the update case.   254       // that the no update case is much more common than the update case.
289 if (new_cost <= (old_cost - (14 << 8))) { 255 if (new_cost <= (old_cost - (14 << 8))) {
290 mode_context[i][j] = new_prob; 256 mode_context[i][j] = new_prob;
291 } 257 }
292 } 258 }
293 } 259 }
294 } 260 }
261
262 #if CONFIG_NEW_MVREF
263 static void update_mv_ref_probs(VP9_COMP *cpi,
264 int mvref_probs[MAX_REF_FRAMES]
265 [MAX_MV_REF_CANDIDATES-1]) {
266 MACROBLOCKD *xd = &cpi->mb.e_mbd;
267 int rf; // Reference frame
268 int ref_c; // Motion reference candidate
269 int node; // Probability node index
270
271 for (rf = 0; rf < MAX_REF_FRAMES; ++rf) {
272 int count = 0;
273
274 // Skip the dummy entry for intra ref frame.
275 if (rf == INTRA_FRAME) {
276 continue;
277 }
278
279 // Sum the counts for all candidates
280 for (ref_c = 0; ref_c < MAX_MV_REF_CANDIDATES; ++ref_c) {
281 count += cpi->mb_mv_ref_count[rf][ref_c];
282 }
283
284 // Calculate the tree node probabilities
285 for (node = 0; node < MAX_MV_REF_CANDIDATES-1; ++node) {
286 int new_prob, old_cost, new_cost;
287 unsigned int branch_cnts[2];
288
289 // How many hits on each branch at this node
290 branch_cnts[0] = cpi->mb_mv_ref_count[rf][node];
291 branch_cnts[1] = count - cpi->mb_mv_ref_count[rf][node];
292
293 // Work out cost of coding branches with the old and optimal probability
294 old_cost = cost_branch256(branch_cnts, xd->mb_mv_ref_probs[rf][node]);
295 new_prob = get_prob(branch_cnts[0], count);
296 new_cost = cost_branch256(branch_cnts, new_prob);
297
298 // Take current 0 branch cases out of residual count
299 count -= cpi->mb_mv_ref_count[rf][node];
300
301 if ((new_cost + VP9_MV_REF_UPDATE_COST) <= old_cost) {
302 mvref_probs[rf][node] = new_prob;
303 } else {
304 mvref_probs[rf][node] = xd->mb_mv_ref_probs[rf][node];
305 }
306 }
307 }
308 }
309 #endif
310
295 static void write_ymode(vp9_writer *bc, int m, const vp9_prob *p) { 311 static void write_ymode(vp9_writer *bc, int m, const vp9_prob *p) {
296 write_token(bc, vp9_ymode_tree, p, vp9_ymode_encodings + m); 312 write_token(bc, vp9_ymode_tree, p, vp9_ymode_encodings + m);
297 } 313 }
298 314
299 static void kfwrite_ymode(vp9_writer *bc, int m, const vp9_prob *p) { 315 static void kfwrite_ymode(vp9_writer *bc, int m, const vp9_prob *p) {
300 write_token(bc, vp9_kf_ymode_tree, p, vp9_kf_ymode_encodings + m); 316 write_token(bc, vp9_kf_ymode_tree, p, vp9_kf_ymode_encodings + m);
301 } 317 }
302 318
303 #if CONFIG_SUPERBLOCKS
304 static void write_sb_ymode(vp9_writer *bc, int m, const vp9_prob *p) { 319 static void write_sb_ymode(vp9_writer *bc, int m, const vp9_prob *p) {
305 write_token(bc, vp9_sb_ymode_tree, p, vp9_sb_ymode_encodings + m); 320 write_token(bc, vp9_sb_ymode_tree, p, vp9_sb_ymode_encodings + m);
306 } 321 }
307 322
308 static void sb_kfwrite_ymode(vp9_writer *bc, int m, const vp9_prob *p) { 323 static void sb_kfwrite_ymode(vp9_writer *bc, int m, const vp9_prob *p) {
309 write_token(bc, vp9_uv_mode_tree, p, vp9_sb_kf_ymode_encodings + m); 324 write_token(bc, vp9_uv_mode_tree, p, vp9_sb_kf_ymode_encodings + m);
310 } 325 }
311 #endif
312 326
313 static void write_i8x8_mode(vp9_writer *bc, int m, const vp9_prob *p) { 327 static void write_i8x8_mode(vp9_writer *bc, int m, const vp9_prob *p) {
314 write_token(bc, vp9_i8x8_mode_tree, p, vp9_i8x8_mode_encodings + m); 328 write_token(bc, vp9_i8x8_mode_tree, p, vp9_i8x8_mode_encodings + m);
315 } 329 }
316 330
317 static void write_uv_mode(vp9_writer *bc, int m, const vp9_prob *p) { 331 static void write_uv_mode(vp9_writer *bc, int m, const vp9_prob *p) {
318 write_token(bc, vp9_uv_mode_tree, p, vp9_uv_mode_encodings + m); 332 write_token(bc, vp9_uv_mode_tree, p, vp9_uv_mode_encodings + m);
319 } 333 }
320 334
321 335
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
390 vp9_write_literal(bc, newp, 8); 404 vp9_write_literal(bc, newp, 8);
391 *oldp = newp; 405 *oldp = newp;
392 } else { 406 } else {
393 vp9_write(bc, 0, upd); 407 vp9_write(bc, 0, upd);
394 } 408 }
395 } 409 }
396 410
397 static void pack_mb_tokens(vp9_writer* const bc, 411 static void pack_mb_tokens(vp9_writer* const bc,
398 TOKENEXTRA **tp, 412 TOKENEXTRA **tp,
399 const TOKENEXTRA *const stop) { 413 const TOKENEXTRA *const stop) {
400 unsigned int split;
401 unsigned int shift;
402 int count = bc->count;
403 unsigned int range = bc->range;
404 unsigned int lowvalue = bc->lowvalue;
405 TOKENEXTRA *p = *tp; 414 TOKENEXTRA *p = *tp;
406 415
407 while (p < stop) { 416 while (p < stop) {
408 const int t = p->Token; 417 const int t = p->Token;
409 vp9_token *const a = vp9_coef_encodings + t; 418 vp9_token *const a = vp9_coef_encodings + t;
410 const vp9_extra_bit_struct *const b = vp9_extra_bits + t; 419 const vp9_extra_bit_struct *const b = vp9_extra_bits + t;
411 int i = 0; 420 int i = 0;
412 const unsigned char *pp = p->context_tree; 421 const unsigned char *pp = p->context_tree;
413 int v = a->value; 422 int v = a->value;
414 int n = a->Len; 423 int n = a->Len;
415 424
416 if (t == EOSB_TOKEN) 425 if (t == EOSB_TOKEN)
417 { 426 {
418 ++p; 427 ++p;
419 break; 428 break;
420 } 429 }
421 430
422 /* skip one or two nodes */ 431 /* skip one or two nodes */
423 if (p->skip_eob_node) { 432 if (p->skip_eob_node) {
424 n -= p->skip_eob_node; 433 n -= p->skip_eob_node;
425 i = 2 * p->skip_eob_node; 434 i = 2 * p->skip_eob_node;
426 } 435 }
427 436
428 do { 437 do {
429 const int bb = (v >> --n) & 1; 438 const int bb = (v >> --n) & 1;
430 split = 1 + (((range - 1) * pp[i >> 1]) >> 8); 439 encode_bool(bc, bb, pp[i >> 1]);
431 i = vp9_coef_tree[i + bb]; 440 i = vp9_coef_tree[i + bb];
432
433 if (bb) {
434 lowvalue += split;
435 range = range - split;
436 } else {
437 range = split;
438 }
439
440 shift = vp9_norm[range];
441 range <<= shift;
442 count += shift;
443
444 if (count >= 0) {
445 int offset = shift - count;
446
447 if ((lowvalue << (offset - 1)) & 0x80000000) {
448 int x = bc->pos - 1;
449
450 while (x >= 0 && bc->buffer[x] == 0xff) {
451 bc->buffer[x] = (unsigned char)0;
452 x--;
453 }
454
455 bc->buffer[x] += 1;
456 }
457
458 bc->buffer[bc->pos++] = (lowvalue >> (24 - offset));
459 lowvalue <<= offset;
460 shift = count;
461 lowvalue &= 0xffffff;
462 count -= 8;
463 }
464
465 lowvalue <<= shift;
466 } while (n); 441 } while (n);
467 442
468 443
469 if (b->base_val) { 444 if (b->base_val) {
470 const int e = p->Extra, L = b->Len; 445 const int e = p->Extra, L = b->Len;
471 446
472 if (L) { 447 if (L) {
473 const unsigned char *pp = b->prob; 448 const unsigned char *pp = b->prob;
474 int v = e >> 1; 449 int v = e >> 1;
475 int n = L; /* number of bits in v, assumed nonzero */ 450 int n = L; /* number of bits in v, assumed nonzero */
476 int i = 0; 451 int i = 0;
477 452
478 do { 453 do {
479 const int bb = (v >> --n) & 1; 454 const int bb = (v >> --n) & 1;
480 split = 1 + (((range - 1) * pp[i >> 1]) >> 8); 455 encode_bool(bc, bb, pp[i >> 1]);
481 i = b->tree[i + bb]; 456 i = b->tree[i + bb];
482
483 if (bb) {
484 lowvalue += split;
485 range = range - split;
486 } else {
487 range = split;
488 }
489
490 shift = vp9_norm[range];
491 range <<= shift;
492 count += shift;
493
494 if (count >= 0) {
495 int offset = shift - count;
496
497 if ((lowvalue << (offset - 1)) & 0x80000000) {
498 int x = bc->pos - 1;
499
500 while (x >= 0 && bc->buffer[x] == 0xff) {
501 bc->buffer[x] = (unsigned char)0;
502 x--;
503 }
504
505 bc->buffer[x] += 1;
506 }
507
508 bc->buffer[bc->pos++] = (lowvalue >> (24 - offset));
509 lowvalue <<= offset;
510 shift = count;
511 lowvalue &= 0xffffff;
512 count -= 8;
513 }
514
515 lowvalue <<= shift;
516 } while (n); 457 } while (n);
517 } 458 }
518 459
519 460 encode_bool(bc, e & 1, 128);
520 {
521
522 split = (range + 1) >> 1;
523
524 if (e & 1) {
525 lowvalue += split;
526 range = range - split;
527 } else {
528 range = split;
529 }
530
531 range <<= 1;
532
533 if ((lowvalue & 0x80000000)) {
534 int x = bc->pos - 1;
535
536 while (x >= 0 && bc->buffer[x] == 0xff) {
537 bc->buffer[x] = (unsigned char)0;
538 x--;
539 }
540
541 bc->buffer[x] += 1;
542
543 }
544
545 lowvalue <<= 1;
546
547 if (!++count) {
548 count = -8;
549 bc->buffer[bc->pos++] = (lowvalue >> 24);
550 lowvalue &= 0xffffff;
551 }
552 }
553
554 } 461 }
555 ++p; 462 ++p;
556 } 463 }
557 464
558 bc->count = count;
559 bc->lowvalue = lowvalue;
560 bc->range = range;
561 *tp = p; 465 *tp = p;
562 } 466 }
563 467
564 static void write_partition_size(unsigned char *cx_data, int size) { 468 static void write_partition_size(unsigned char *cx_data, int size) {
565 signed char csize; 469 signed char csize;
566 470
567 csize = size & 0xff; 471 csize = size & 0xff;
568 *cx_data = csize; 472 *cx_data = csize;
569 csize = (size >> 8) & 0xff; 473 csize = (size >> 8) & 0xff;
570 *(cx_data + 1) = csize; 474 *(cx_data + 1) = csize;
571 csize = (size >> 16) & 0xff; 475 csize = (size >> 16) & 0xff;
572 *(cx_data + 2) = csize; 476 *(cx_data + 2) = csize;
573 477
574 } 478 }
575 479
576 static void write_mv_ref 480 static void write_mv_ref
577 ( 481 (
578 vp9_writer *bc, MB_PREDICTION_MODE m, const vp9_prob *p 482 vp9_writer *bc, MB_PREDICTION_MODE m, const vp9_prob *p
579 ) { 483 ) {
580 #if CONFIG_DEBUG 484 #if CONFIG_DEBUG
581 assert(NEARESTMV <= m && m <= SPLITMV); 485 assert(NEARESTMV <= m && m <= SPLITMV);
582 #endif 486 #endif
583 write_token(bc, vp9_mv_ref_tree, p, 487 write_token(bc, vp9_mv_ref_tree, p,
584 vp9_mv_ref_encoding_array - NEARESTMV + m); 488 vp9_mv_ref_encoding_array - NEARESTMV + m);
585 } 489 }
586 490
587 #if CONFIG_SUPERBLOCKS
588 static void write_sb_mv_ref(vp9_writer *bc, MB_PREDICTION_MODE m, 491 static void write_sb_mv_ref(vp9_writer *bc, MB_PREDICTION_MODE m,
589 const vp9_prob *p) { 492 const vp9_prob *p) {
590 #if CONFIG_DEBUG 493 #if CONFIG_DEBUG
591 assert(NEARESTMV <= m && m < SPLITMV); 494 assert(NEARESTMV <= m && m < SPLITMV);
592 #endif 495 #endif
593 write_token(bc, vp9_sb_mv_ref_tree, p, 496 write_token(bc, vp9_sb_mv_ref_tree, p,
594 vp9_sb_mv_ref_encoding_array - NEARESTMV + m); 497 vp9_sb_mv_ref_encoding_array - NEARESTMV + m);
595 } 498 }
596 #endif
597 499
598 static void write_sub_mv_ref 500 static void write_sub_mv_ref
599 ( 501 (
600 vp9_writer *bc, B_PREDICTION_MODE m, const vp9_prob *p 502 vp9_writer *bc, B_PREDICTION_MODE m, const vp9_prob *p
601 ) { 503 ) {
602 #if CONFIG_DEBUG 504 #if CONFIG_DEBUG
603 assert(LEFT4X4 <= m && m <= NEW4X4); 505 assert(LEFT4X4 <= m && m <= NEW4X4);
604 #endif 506 #endif
605 write_token(bc, vp9_sub_mv_ref_tree, p, 507 write_token(bc, vp9_sub_mv_ref_tree, p,
606 vp9_sub_mv_ref_encoding_array - LEFT4X4 + m); 508 vp9_sub_mv_ref_encoding_array - LEFT4X4 + m);
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
647 } 549 }
648 } 550 }
649 #endif 551 #endif
650 552
651 // This function writes the current macro block's segment id to the bitstream  553 // This function writes the current macro block's segment id to the bitstream
652 // It should only be called if a segment map update is indicated. 554 // It should only be called if a segment map update is indicated.
653 static void write_mb_segid(vp9_writer *bc, 555 static void write_mb_segid(vp9_writer *bc,
654 const MB_MODE_INFO *mi, const MACROBLOCKD *xd) { 556 const MB_MODE_INFO *mi, const MACROBLOCKD *xd) {
655 // Encode the MB segment id. 557 // Encode the MB segment id.
656 int seg_id = mi->segment_id; 558 int seg_id = mi->segment_id;
657 #if CONFIG_SUPERBLOCKS 559
658 if (mi->encoded_as_sb) {
659 if (xd->mb_to_right_edge >= 0)
660 seg_id = seg_id && xd->mode_info_context[1].mbmi.segment_id;
661 if (xd->mb_to_bottom_edge >= 0) {
662 seg_id = seg_id &&
663 xd->mode_info_context[xd->mode_info_stride].mbmi.segment_id;
664 if (xd->mb_to_right_edge >= 0)
665 seg_id = seg_id &&
666 xd->mode_info_context[xd->mode_info_stride + 1].mbmi.segment_id;
667 }
668 }
669 #endif
670 if (xd->segmentation_enabled && xd->update_mb_segmentation_map) { 560 if (xd->segmentation_enabled && xd->update_mb_segmentation_map) {
671 switch (seg_id) { 561 switch (seg_id) {
672 case 0: 562 case 0:
673 vp9_write(bc, 0, xd->mb_segment_tree_probs[0]); 563 vp9_write(bc, 0, xd->mb_segment_tree_probs[0]);
674 vp9_write(bc, 0, xd->mb_segment_tree_probs[1]); 564 vp9_write(bc, 0, xd->mb_segment_tree_probs[1]);
675 break; 565 break;
676 case 1: 566 case 1:
677 vp9_write(bc, 0, xd->mb_segment_tree_probs[0]); 567 vp9_write(bc, 0, xd->mb_segment_tree_probs[0]);
678 vp9_write(bc, 1, xd->mb_segment_tree_probs[1]); 568 vp9_write(bc, 1, xd->mb_segment_tree_probs[1]);
679 break; 569 break;
(...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after
788 678
789 cm->prob_intra_coded = get_binary_prob(rf_intra, rf_inter); 679 cm->prob_intra_coded = get_binary_prob(rf_intra, rf_inter);
790 cm->prob_last_coded = get_prob(rfct[LAST_FRAME], rf_inter); 680 cm->prob_last_coded = get_prob(rfct[LAST_FRAME], rf_inter);
791 cm->prob_gf_coded = get_binary_prob(rfct[GOLDEN_FRAME], rfct[ALTREF_FRAME]); 681 cm->prob_gf_coded = get_binary_prob(rfct[GOLDEN_FRAME], rfct[ALTREF_FRAME]);
792 682
793 // Compute a modified set of probabilities to use when prediction of the 683 // Compute a modified set of probabilities to use when prediction of the
794 // reference frame fails 684 // reference frame fails
795 vp9_compute_mod_refprobs(cm); 685 vp9_compute_mod_refprobs(cm);
796 } 686 }
797 687
798 static void pack_inter_mode_mvs(VP9_COMP *const cpi, vp9_writer *const bc) { 688 static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m,
689 vp9_writer *bc,
690 int mb_rows_left, int mb_cols_left) {
799 VP9_COMMON *const pc = &cpi->common; 691 VP9_COMMON *const pc = &cpi->common;
800 const nmv_context *nmvc = &pc->fc.nmvc; 692 const nmv_context *nmvc = &pc->fc.nmvc;
801 MACROBLOCKD *xd = &cpi->mb.e_mbd; 693 MACROBLOCK *const x = &cpi->mb;
802 MODE_INFO *m; 694 MACROBLOCKD *const xd = &x->e_mbd;
803 MODE_INFO *prev_m;
804 TOKENEXTRA *tok = cpi->tok;
805 TOKENEXTRA *tok_end = tok + cpi->tok_count;
806
807 const int mis = pc->mode_info_stride; 695 const int mis = pc->mode_info_stride;
808 int mb_row, mb_col; 696 MB_MODE_INFO *const mi = &m->mbmi;
809 int row, col; 697 const MV_REFERENCE_FRAME rf = mi->ref_frame;
810 698 const MB_PREDICTION_MODE mode = mi->mode;
811 // Values used in prediction model coding 699 const int segment_id = mi->segment_id;
812 vp9_prob pred_prob; 700 const int mb_size = 1 << mi->sb_type;
813 unsigned char prediction_flag; 701 int skip_coeff;
814 702
815 int row_delta[4] = { 0, +1, 0, -1}; 703 int mb_row = pc->mb_rows - mb_rows_left;
816 int col_delta[4] = { +1, -1, +1, +1}; 704 int mb_col = pc->mb_cols - mb_cols_left;
817 705 xd->prev_mode_info_context = pc->prev_mi + (m - pc->mi);
818 cpi->mb.partition_info = cpi->mb.pi; 706 x->partition_info = x->pi + (m - pc->mi);
819 707
820 mb_row = 0; 708 // Distance of Mb to the various image edges.
821 for (row = 0; row < pc->mb_rows; row += 2) { 709 // These specified to 8th pel as they are always compared to MV
822 m = pc->mi + row * mis; 710 // values that are in 1/8th pel units
823 prev_m = pc->prev_mi + row * mis; 711 xd->mb_to_left_edge = -((mb_col * 16) << 3);
824 712 xd->mb_to_top_edge = -((mb_row * 16)) << 3;
825 mb_col = 0; 713 xd->mb_to_right_edge = ((pc->mb_cols - mb_size - mb_col) * 16) << 3;
826 for (col = 0; col < pc->mb_cols; col += 2) { 714 xd->mb_to_bottom_edge = ((pc->mb_rows - mb_size - mb_row) * 16) << 3;
827 int i; 715
828 716 #ifdef ENTROPY_STATS
829 // Process the 4 MBs in the order: 717 active_section = 9;
830 // top-left, top-right, bottom-left, bottom-right 718 #endif
831 #if CONFIG_SUPERBLOCKS 719
832 vp9_write(bc, m->mbmi.encoded_as_sb, pc->sb_coded); 720 if (cpi->mb.e_mbd.update_mb_segmentation_map) {
833 #endif 721 // Is temporal coding of the segment map enabled
834 for (i = 0; i < 4; i++) { 722 if (pc->temporal_update) {
835 MB_MODE_INFO *mi; 723 unsigned char prediction_flag = vp9_get_pred_flag(xd, PRED_SEG_ID);
836 MV_REFERENCE_FRAME rf; 724 vp9_prob pred_prob = vp9_get_pred_prob(pc, xd, PRED_SEG_ID);
837 MV_REFERENCE_FRAME sec_ref_frame; 725
838 MB_PREDICTION_MODE mode; 726 // Code the segment id prediction flag for this mb
839 int segment_id, skip_coeff; 727 vp9_write(bc, prediction_flag, pred_prob);
840 728
841 int dy = row_delta[i]; 729 // If the mb segment id wasn't predicted code explicitly
842 int dx = col_delta[i]; 730 if (!prediction_flag)
843 int offset_extended = dy * mis + dx; 731 write_mb_segid(bc, mi, &cpi->mb.e_mbd);
844 732 } else {
845 if ((mb_row >= pc->mb_rows) || (mb_col >= pc->mb_cols)) { 733 // Normal unpredicted coding
846 // MB lies outside frame, move on 734 write_mb_segid(bc, mi, &cpi->mb.e_mbd);
847 mb_row += dy; 735 }
848 mb_col += dx; 736 }
849 m += offset_extended; 737
850 prev_m += offset_extended; 738 if (!pc->mb_no_coeff_skip) {
851 cpi->mb.partition_info += offset_extended; 739 skip_coeff = 0;
852 continue; 740 } else if (vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) &&
741 vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) == 0) {
742 skip_coeff = 1;
743 } else {
744 const int nmbs = mb_size;
745 const int xmbs = MIN(nmbs, mb_cols_left);
746 const int ymbs = MIN(nmbs, mb_rows_left);
747 int x, y;
748
749 skip_coeff = 1;
750 for (y = 0; y < ymbs; y++) {
751 for (x = 0; x < xmbs; x++) {
752 skip_coeff = skip_coeff && m[y * mis + x].mbmi.mb_skip_coeff;
753 }
754 }
755
756 vp9_write(bc, skip_coeff,
757 vp9_get_pred_prob(pc, xd, PRED_MBSKIP));
758 }
759
760 // Encode the reference frame.
761 if (!vp9_segfeature_active(xd, segment_id, SEG_LVL_MODE)
762 || vp9_get_segdata(xd, segment_id, SEG_LVL_MODE) >= NEARESTMV) {
763 encode_ref_frame(bc, pc, xd, segment_id, rf);
764 } else {
765 assert(rf == INTRA_FRAME);
766 }
767
768 if (rf == INTRA_FRAME) {
769 #ifdef ENTROPY_STATS
770 active_section = 6;
771 #endif
772
773 if (!vp9_segfeature_active(xd, segment_id, SEG_LVL_MODE)) {
774 if (m->mbmi.sb_type)
775 write_sb_ymode(bc, mode, pc->fc.sb_ymode_prob);
776 else
777 write_ymode(bc, mode, pc->fc.ymode_prob);
778 }
779 if (mode == B_PRED) {
780 int j = 0;
781 do {
782 write_bmode(bc, m->bmi[j].as_mode.first,
783 pc->fc.bmode_prob);
784 } while (++j < 16);
785 }
786 if (mode == I8X8_PRED) {
787 write_i8x8_mode(bc, m->bmi[0].as_mode.first,
788 pc->fc.i8x8_mode_prob);
789 write_i8x8_mode(bc, m->bmi[2].as_mode.first,
790 pc->fc.i8x8_mode_prob);
791 write_i8x8_mode(bc, m->bmi[8].as_mode.first,
792 pc->fc.i8x8_mode_prob);
793 write_i8x8_mode(bc, m->bmi[10].as_mode.first,
794 pc->fc.i8x8_mode_prob);
795 } else {
796 write_uv_mode(bc, mi->uv_mode,
797 pc->fc.uv_mode_prob[mode]);
798 }
799 } else {
800 vp9_prob mv_ref_p[VP9_MVREFS - 1];
801
802 vp9_mv_ref_probs(&cpi->common, mv_ref_p, mi->mb_mode_context[rf]);
803
804 // #ifdef ENTROPY_STATS
805 #ifdef ENTROPY_STATS
806 accum_mv_refs(mode, ct);
807 active_section = 3;
808 #endif
809
810 // Is the segment coding of mode enabled
811 if (!vp9_segfeature_active(xd, segment_id, SEG_LVL_MODE)) {
812 if (mi->sb_type) {
813 write_sb_mv_ref(bc, mode, mv_ref_p);
814 } else {
815 write_mv_ref(bc, mode, mv_ref_p);
816 }
817 vp9_accum_mv_refs(&cpi->common, mode, mi->mb_mode_context[rf]);
818 }
819
820 if (mode >= NEARESTMV && mode <= SPLITMV) {
821 if (cpi->common.mcomp_filter_type == SWITCHABLE) {
822 write_token(bc, vp9_switchable_interp_tree,
823 vp9_get_pred_probs(&cpi->common, xd,
824 PRED_SWITCHABLE_INTERP),
825 vp9_switchable_interp_encodings +
826 vp9_switchable_interp_map[mi->interp_filter]);
827 } else {
828 assert(mi->interp_filter == cpi->common.mcomp_filter_type);
829 }
830 }
831
832 // does the feature use compound prediction or not
833 // (if not specified at the frame/segment level)
834 if (cpi->common.comp_pred_mode == HYBRID_PREDICTION) {
835 vp9_write(bc, mi->second_ref_frame > INTRA_FRAME,
836 vp9_get_pred_prob(pc, xd, PRED_COMP));
837 }
838 #if CONFIG_COMP_INTERINTRA_PRED
839 if (cpi->common.use_interintra &&
840 mode >= NEARESTMV && mode < SPLITMV &&
841 mi->second_ref_frame <= INTRA_FRAME) {
842 vp9_write(bc, mi->second_ref_frame == INTRA_FRAME,
843 pc->fc.interintra_prob);
844 // if (!cpi->dummy_packing)
845 // printf("-- %d (%d)\n", mi->second_ref_frame == INTRA_FRAME,
846 // pc->fc.interintra_prob);
847 if (mi->second_ref_frame == INTRA_FRAME) {
848 // if (!cpi->dummy_packing)
849 // printf("** %d %d\n", mi->interintra_mode,
850 // mi->interintra_uv_mode);
851 write_ymode(bc, mi->interintra_mode, pc->fc.ymode_prob);
852 #if SEPARATE_INTERINTRA_UV
853 write_uv_mode(bc, mi->interintra_uv_mode,
854 pc->fc.uv_mode_prob[mi->interintra_mode]);
855 #endif
856 }
857 }
858 #endif
859
860 #if CONFIG_NEW_MVREF
861 // if ((mode == NEWMV) || (mode == SPLITMV)) {
862 if (mode == NEWMV) {
863 // Encode the index of the choice.
864 vp9_write_mv_ref_id(bc,
865 xd->mb_mv_ref_probs[rf], mi->best_index);
866
867 if (mi->second_ref_frame > 0) {
868 // Encode the index of the choice.
869 vp9_write_mv_ref_id(
870 bc, xd->mb_mv_ref_probs[mi->second_ref_frame],
871 mi->best_second_index);
872 }
873 }
874 #endif
875
876 switch (mode) { /* new, split require MVs */
877 case NEWMV:
878 #ifdef ENTROPY_STATS
879 active_section = 5;
880 #endif
881 write_nmv(bc, &mi->mv[0].as_mv, &mi->best_mv,
882 (const nmv_context*) nmvc,
883 xd->allow_high_precision_mv);
884
885 if (mi->second_ref_frame > 0) {
886 write_nmv(bc, &mi->mv[1].as_mv, &mi->best_second_mv,
887 (const nmv_context*) nmvc,
888 xd->allow_high_precision_mv);
853 } 889 }
854 890 break;
855 mi = &m->mbmi; 891 case SPLITMV: {
856 rf = mi->ref_frame; 892 int j = 0;
857 sec_ref_frame = mi->second_ref_frame; 893
858 mode = mi->mode; 894 #ifdef MODE_STATS
859 segment_id = mi->segment_id; 895 ++count_mb_seg[mi->partitioning];
860 896 #endif
861 // Distance of Mb to the various image edges. 897
862 // These specified to 8th pel as they are always compared to MV 898 write_split(bc, mi->partitioning, cpi->common.fc.mbsplit_prob);
863 // values that are in 1/8th pel units 899 cpi->mbsplit_count[mi->partitioning]++;
864 xd->mb_to_left_edge = -((mb_col * 16) << 3); 900
865 xd->mb_to_top_edge = -((mb_row * 16)) << 3; 901 do {
866 902 B_PREDICTION_MODE blockmode;
867 #if CONFIG_SUPERBLOCKS 903 int_mv blockmv;
868 if (mi->encoded_as_sb) { 904 const int *const L = vp9_mbsplits[mi->partitioning];
869 xd->mb_to_right_edge = ((pc->mb_cols - 2 - mb_col) * 16) << 3; 905 int k = -1; /* first block in subset j */
870 xd->mb_to_bottom_edge = ((pc->mb_rows - 2 - mb_row) * 16) << 3; 906 int mv_contz;
871 } else { 907 int_mv leftmv, abovemv;
872 #endif 908
873 xd->mb_to_right_edge = ((pc->mb_cols - 1 - mb_col) * 16) << 3; 909 blockmode = cpi->mb.partition_info->bmi[j].mode;
874 xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3; 910 blockmv = cpi->mb.partition_info->bmi[j].mv;
875 #if CONFIG_SUPERBLOCKS 911 #if CONFIG_DEBUG
876 } 912 while (j != L[++k])
877 #endif 913 if (k >= 16)
878 914 assert(0);
879 // Make sure the MacroBlockD mode info pointer is set correctly 915 #else
880 xd->mode_info_context = m; 916 while (j != L[++k]);
881 xd->prev_mode_info_context = prev_m; 917 #endif
882 918 leftmv.as_int = left_block_mv(m, k);
883 #ifdef ENTROPY_STATS 919 abovemv.as_int = above_block_mv(m, k, mis);
884 active_section = 9; 920 mv_contz = vp9_mv_cont(&leftmv, &abovemv);
885 #endif 921
886 if (cpi->mb.e_mbd.update_mb_segmentation_map) { 922 write_sub_mv_ref(bc, blockmode,
887 // Is temporal coding of the segment map enabled 923 cpi->common.fc.sub_mv_ref_prob[mv_contz]);
888 if (pc->temporal_update) { 924 cpi->sub_mv_ref_count[mv_contz][blockmode - LEFT4X4]++;
889 prediction_flag = vp9_get_pred_flag(xd, PRED_SEG_ID); 925 if (blockmode == NEW4X4) {
890 pred_prob = vp9_get_pred_prob(pc, xd, PRED_SEG_ID); 926 #ifdef ENTROPY_STATS
891 927 active_section = 11;
892 // Code the segment id prediction flag for this mb 928 #endif
893 vp9_write(bc, prediction_flag, pred_prob); 929 write_nmv(bc, &blockmv.as_mv, &mi->best_mv,
894 930 (const nmv_context*) nmvc,
895 // If the mb segment id wasn't predicted code explicitly 931 xd->allow_high_precision_mv);
896 if (!prediction_flag) 932
897 write_mb_segid(bc, mi, &cpi->mb.e_mbd); 933 if (mi->second_ref_frame > 0) {
898 } else { 934 write_nmv(bc,
899 // Normal unpredicted coding 935 &cpi->mb.partition_info->bmi[j].second_mv.as_mv,
900 write_mb_segid(bc, mi, &cpi->mb.e_mbd); 936 &mi->best_second_mv,
901 } 937 (const nmv_context*) nmvc,
902 } 938 xd->allow_high_precision_mv);
903
904 skip_coeff = 1;
905 if (pc->mb_no_coeff_skip &&
906 (!vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) ||
907 (vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) != 0))) {
908 skip_coeff = mi->mb_skip_coeff;
909 #if CONFIG_SUPERBLOCKS
910 if (mi->encoded_as_sb) {
911 skip_coeff &= m[1].mbmi.mb_skip_coeff;
912 skip_coeff &= m[mis].mbmi.mb_skip_coeff;
913 skip_coeff &= m[mis + 1].mbmi.mb_skip_coeff;
914 }
915 #endif
916 vp9_write(bc, skip_coeff,
917 vp9_get_pred_prob(pc, xd, PRED_MBSKIP));
918 }
919
920 // Encode the reference frame.
921 if (!vp9_segfeature_active(xd, segment_id, SEG_LVL_MODE)
922 || vp9_get_segdata(xd, segment_id, SEG_LVL_MODE) >= NEARESTMV) {
923 encode_ref_frame(bc, pc, xd, segment_id, rf);
924 } else {
925 assert(rf == INTRA_FRAME);
926 }
927
928 if (rf == INTRA_FRAME) {
929 #ifdef ENTROPY_STATS
930 active_section = 6;
931 #endif
932
933 if (!vp9_segfeature_active(xd, segment_id, SEG_LVL_MODE)) {
934 #if CONFIG_SUPERBLOCKS
935 if (m->mbmi.encoded_as_sb)
936 write_sb_ymode(bc, mode, pc->fc.sb_ymode_prob);
937 else
938 #endif
939 write_ymode(bc, mode, pc->fc.ymode_prob);
940 }
941 if (mode == B_PRED) {
942 int j = 0;
943 #if CONFIG_COMP_INTRA_PRED
944 int uses_second =
945 m->bmi[0].as_mode.second !=
946 (B_PREDICTION_MODE)(B_DC_PRED - 1);
947 vp9_write(bc, uses_second, DEFAULT_COMP_INTRA_PROB);
948 #endif
949 do {
950 #if CONFIG_COMP_INTRA_PRED
951 B_PREDICTION_MODE mode2 = m->bmi[j].as_mode.second;
952 #endif
953 write_bmode(bc, m->bmi[j].as_mode.first,
954 pc->fc.bmode_prob);
955 #if CONFIG_COMP_INTRA_PRED
956 if (uses_second) {
957 write_bmode(bc, mode2, pc->fc.bmode_prob);
958 }
959 #endif
960 } while (++j < 16);
961 }
962 if (mode == I8X8_PRED) {
963 write_i8x8_mode(bc, m->bmi[0].as_mode.first,
964 pc->fc.i8x8_mode_prob);
965 write_i8x8_mode(bc, m->bmi[2].as_mode.first,
966 pc->fc.i8x8_mode_prob);
967 write_i8x8_mode(bc, m->bmi[8].as_mode.first,
968 pc->fc.i8x8_mode_prob);
969 write_i8x8_mode(bc, m->bmi[10].as_mode.first,
970 pc->fc.i8x8_mode_prob);
971 } else {
972 write_uv_mode(bc, mi->uv_mode,
973 pc->fc.uv_mode_prob[mode]);
974 }
975 } else {
976 int_mv best_mv, best_second_mv;
977
978 vp9_prob mv_ref_p [VP9_MVREFS - 1];
979
980 {
981 best_mv.as_int = mi->ref_mvs[rf][0].as_int;
982
983 vp9_mv_ref_probs(&cpi->common, mv_ref_p, mi->mb_mode_context[rf]);
984
985 #ifdef ENTROPY_STATS
986 accum_mv_refs(mode, ct);
987 #endif
988 }
989
990 #ifdef ENTROPY_STATS
991 active_section = 3;
992 #endif
993
994 // Is the segment coding of mode enabled
995 if (!vp9_segfeature_active(xd, segment_id, SEG_LVL_MODE)) {
996 #if CONFIG_SUPERBLOCKS
997 if (mi->encoded_as_sb) {
998 write_sb_mv_ref(bc, mode, mv_ref_p);
999 } else
1000 #endif
1001 {
1002 write_mv_ref(bc, mode, mv_ref_p);
1003 }
1004 vp9_accum_mv_refs(&cpi->common, mode, mi->mb_mode_context[rf]);
1005 }
1006
1007 #if CONFIG_PRED_FILTER
1008 // Is the prediction filter enabled
1009 if (mode >= NEARESTMV && mode < SPLITMV) {
1010 if (cpi->common.pred_filter_mode == 2)
1011 vp9_write(bc, mi->pred_filter_enabled,
1012 pc->prob_pred_filter_off);
1013 else
1014 assert(mi->pred_filter_enabled ==
1015 cpi->common.pred_filter_mode);
1016 }
1017 #endif
1018 if (mode >= NEARESTMV && mode <= SPLITMV)
1019 {
1020 if (cpi->common.mcomp_filter_type == SWITCHABLE) {
1021 write_token(bc, vp9_switchable_interp_tree,
1022 vp9_get_pred_probs(&cpi->common, xd,
1023 PRED_SWITCHABLE_INTERP),
1024 vp9_switchable_interp_encodings +
1025 vp9_switchable_interp_map[mi->interp_filter]);
1026 } else {
1027 assert (mi->interp_filter ==
1028 cpi->common.mcomp_filter_type);
1029 } 939 }
1030 } 940 }
1031 941 } while (++j < cpi->mb.partition_info->count);
1032 if (mi->second_ref_frame > 0 && 942 break;
1033 (mode == NEWMV || mode == SPLITMV)) { 943 }
1034 944 default:
1035 best_second_mv.as_int = 945 break;
1036 mi->ref_mvs[mi->second_ref_frame][0].as_int; 946 }
1037 } 947 }
1038 948
1039 // does the feature use compound prediction or not 949 if (((rf == INTRA_FRAME && mode <= I8X8_PRED) ||
1040 // (if not specified at the frame/segment level) 950 (rf != INTRA_FRAME && !(mode == SPLITMV &&
1041 if (cpi->common.comp_pred_mode == HYBRID_PREDICTION) { 951 mi->partitioning == PARTITIONING_4X4))) &&
1042 vp9_write(bc, mi->second_ref_frame > INTRA_FRAME, 952 pc->txfm_mode == TX_MODE_SELECT &&
1043 vp9_get_pred_prob(pc, xd, PRED_COMP)); 953 !((pc->mb_no_coeff_skip && skip_coeff) ||
1044 } 954 (vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) &&
1045 #if CONFIG_COMP_INTERINTRA_PRED 955 vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) == 0))) {
1046 if (cpi->common.use_interintra && 956 TX_SIZE sz = mi->txfm_size;
1047 mode >= NEARESTMV && mode < SPLITMV && 957 // FIXME(rbultje) code ternary symbol once all experiments are merged
1048 mi->second_ref_frame <= INTRA_FRAME) { 958 vp9_write(bc, sz != TX_4X4, pc->prob_tx[0]);
1049 vp9_write(bc, mi->second_ref_frame == INTRA_FRAME, 959 if (sz != TX_4X4 && mode != I8X8_PRED && mode != SPLITMV) {
1050 pc->fc.interintra_prob); 960 vp9_write(bc, sz != TX_8X8, pc->prob_tx[1]);
1051 // if (!cpi->dummy_packing) 961 if (mi->sb_type && sz != TX_8X8)
1052 // printf("-- %d (%d)\n", mi->second_ref_frame == INTRA_FRAME, 962 vp9_write(bc, sz != TX_16X16, pc->prob_tx[2]);
1053 // pc->fc.interintra_prob); 963 }
1054 if (mi->second_ref_frame == INTRA_FRAME) {
1055 // if (!cpi->dummy_packing)
1056 // printf("** %d %d\n", mi->interintra_mode,
1057 // mi->interintra_uv_mode);
1058 write_ymode(bc, mi->interintra_mode, pc->fc.ymode_prob);
1059 #if SEPARATE_INTERINTRA_UV
1060 write_uv_mode(bc, mi->interintra_uv_mode,
1061 pc->fc.uv_mode_prob[mi->interintra_mode]);
1062 #endif
1063 }
1064 }
1065 #endif
1066
1067 {
1068 switch (mode) { /* new, split require MVs */
1069 case NEWMV:
1070 #ifdef ENTROPY_STATS
1071 active_section = 5;
1072 #endif
1073
1074 #if CONFIG_NEW_MVREF
1075 {
1076 unsigned int best_index;
1077
1078 // Choose the best mv reference
1079 /*
1080 best_index = pick_best_mv_ref(x, rf, mi->mv[0],
1081 mi->ref_mvs[rf], &best_mv);
1082 assert(best_index == mi->best_index);
1083 assert(best_mv.as_int == mi->best_mv.as_int);
1084 */
1085 best_index = mi->best_index;
1086 best_mv.as_int = mi->best_mv.as_int;
1087
1088 // Encode the index of the choice.
1089 vp9_write_mv_ref_id(bc,
1090 xd->mb_mv_ref_id_probs[rf], best_index);
1091
1092 cpi->best_ref_index_counts[rf][best_index]++;
1093
1094 }
1095 #endif
1096
1097 write_nmv(bc, &mi->mv[0].as_mv, &best_mv,
1098 (const nmv_context*) nmvc,
1099 xd->allow_high_precision_mv);
1100
1101 if (mi->second_ref_frame > 0) {
1102 #if CONFIG_NEW_MVREF
1103 unsigned int best_index;
1104 sec_ref_frame = mi->second_ref_frame;
1105
1106 /*
1107 best_index =
1108 pick_best_mv_ref(x, sec_ref_frame, mi->mv[1],
1109 mi->ref_mvs[sec_ref_frame],
1110 &best_second_mv);
1111 assert(best_index == mi->best_second_index);
1112 assert(best_second_mv.as_int == mi->best_second_mv.as_int);
1113 */
1114 best_index = mi->best_second_index;
1115 best_second_mv.as_int = mi->best_second_mv.as_int;
1116
1117 // Encode the index of the choice.
1118 vp9_write_mv_ref_id(bc,
1119 xd->mb_mv_ref_id_probs[sec_ref_frame],
1120 best_index);
1121
1122 cpi->best_ref_index_counts[sec_ref_frame][best_index]++;
1123 #endif
1124 write_nmv(bc, &mi->mv[1].as_mv, &best_second_mv,
1125 (const nmv_context*) nmvc,
1126 xd->allow_high_precision_mv);
1127 }
1128 break;
1129 case SPLITMV: {
1130 int j = 0;
1131
1132 #ifdef MODE_STATS
1133 ++count_mb_seg [mi->partitioning];
1134 #endif
1135
1136 write_split(bc, mi->partitioning, cpi->common.fc.mbsplit_prob);
1137 cpi->mbsplit_count[mi->partitioning]++;
1138
1139 do {
1140 B_PREDICTION_MODE blockmode;
1141 int_mv blockmv;
1142 const int *const L =
1143 vp9_mbsplits [mi->partitioning];
1144 int k = -1; /* first block in subset j */
1145 int mv_contz;
1146 int_mv leftmv, abovemv;
1147
1148 blockmode = cpi->mb.partition_info->bmi[j].mode;
1149 blockmv = cpi->mb.partition_info->bmi[j].mv;
1150 #if CONFIG_DEBUG
1151 while (j != L[++k])
1152 if (k >= 16)
1153 assert(0);
1154 #else
1155 while (j != L[++k]);
1156 #endif
1157 leftmv.as_int = left_block_mv(m, k);
1158 abovemv.as_int = above_block_mv(m, k, mis);
1159 mv_contz = vp9_mv_cont(&leftmv, &abovemv);
1160
1161 write_sub_mv_ref(bc, blockmode,
1162 cpi->common.fc.sub_mv_ref_prob [mv_contz]);
1163 cpi->sub_mv_ref_count[mv_contz][blockmode - LEFT4X4]++;
1164 if (blockmode == NEW4X4) {
1165 #ifdef ENTROPY_STATS
1166 active_section = 11;
1167 #endif
1168 write_nmv(bc, &blockmv.as_mv, &best_mv,
1169 (const nmv_context*) nmvc,
1170 xd->allow_high_precision_mv);
1171
1172 if (mi->second_ref_frame > 0) {
1173 write_nmv(bc,
1174 &cpi->mb.partition_info->bmi[j].second_mv.as_mv,
1175 &best_second_mv,
1176 (const nmv_context*) nmvc,
1177 xd->allow_high_precision_mv);
1178 }
1179 }
1180 } while (++j < cpi->mb.partition_info->count);
1181 }
1182 break;
1183 default:
1184 break;
1185 }
1186 }
1187 /* This is not required if the counts in cpi are consistent with the
1188 * final packing pass */
1189 // if (!cpi->dummy_packing)
1190 // vp9_update_nmv_count(cpi, x, &best_mv, &best_second_mv);
1191 }
1192
1193 if (((rf == INTRA_FRAME && mode <= I8X8_PRED) ||
1194 (rf != INTRA_FRAME && !(mode == SPLITMV &&
1195 mi->partitioning == PARTITIONING_4X4))) &&
1196 pc->txfm_mode == TX_MODE_SELECT &&
1197 !((pc->mb_no_coeff_skip && skip_coeff) ||
1198 (vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) &&
1199 vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) == 0))) {
1200 TX_SIZE sz = mi->txfm_size;
1201 // FIXME(rbultje) code ternary symbol once all experiments are merged
1202 vp9_write(bc, sz != TX_4X4, pc->prob_tx[0]);
1203 if (sz != TX_4X4 && mode != I8X8_PRED && mode != SPLITMV)
1204 vp9_write(bc, sz != TX_8X8, pc->prob_tx[1]);
1205 }
1206
1207 #ifdef ENTROPY_STATS
1208 active_section = 1;
1209 #endif
1210 assert(tok < tok_end);
1211 pack_mb_tokens(bc, &tok, tok_end);
1212
1213 #if CONFIG_SUPERBLOCKS
1214 if (m->mbmi.encoded_as_sb) {
1215 assert(!i);
1216 mb_col += 2;
1217 m += 2;
1218 cpi->mb.partition_info += 2;
1219 prev_m += 2;
1220 break;
1221 }
1222 #endif
1223
1224 // Next MB
1225 mb_row += dy;
1226 mb_col += dx;
1227 m += offset_extended;
1228 prev_m += offset_extended;
1229 cpi->mb.partition_info += offset_extended;
1230 #if CONFIG_DEBUG
1231 assert((prev_m - cpi->common.prev_mip) == (m - cpi->common.mip));
1232 assert((prev_m - cpi->common.prev_mi) == (m - cpi->common.mi));
1233 #endif
1234 }
1235 }
1236
1237 // Next SB
1238 mb_row += 2;
1239 m += mis + (1 - (pc->mb_cols & 0x1));
1240 prev_m += mis + (1 - (pc->mb_cols & 0x1));
1241 cpi->mb.partition_info += mis + (1 - (pc->mb_cols & 0x1));
1242 } 964 }
1243 } 965 }
1244 966
1245 967 static void write_mb_modes_kf(const VP9_COMP *cpi,
1246 static void write_mb_modes_kf(const VP9_COMMON *c, 968 const MODE_INFO *m,
1247 const MACROBLOCKD *xd, 969 vp9_writer *bc,
1248 const MODE_INFO *m, 970 int mb_rows_left, int mb_cols_left) {
1249 int mode_info_stride, 971 const VP9_COMMON *const c = &cpi->common;
1250 vp9_writer *const bc) { 972 const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
1251 int ym; 973 const int mis = c->mode_info_stride;
1252 int segment_id; 974 const int ym = m->mbmi.mode;
1253 975 const int segment_id = m->mbmi.segment_id;
1254 ym = m->mbmi.mode; 976 int skip_coeff;
1255 segment_id = m->mbmi.segment_id;
1256 977
1257 if (xd->update_mb_segmentation_map) { 978 if (xd->update_mb_segmentation_map) {
1258 write_mb_segid(bc, &m->mbmi, xd); 979 write_mb_segid(bc, &m->mbmi, xd);
1259 } 980 }
1260 981
1261 if (c->mb_no_coeff_skip && 982 if (!c->mb_no_coeff_skip) {
1262 (!vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) || 983 skip_coeff = 0;
1263 (vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) != 0))) { 984 } else if (vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) &&
1264 int skip_coeff = m->mbmi.mb_skip_coeff; 985 vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) == 0) {
1265 #if CONFIG_SUPERBLOCKS 986 skip_coeff = 1;
1266 const int mis = mode_info_stride; 987 } else {
1267 if (m->mbmi.encoded_as_sb) { 988 const int nmbs = 1 << m->mbmi.sb_type;
1268 skip_coeff &= m[1].mbmi.mb_skip_coeff; 989 const int xmbs = MIN(nmbs, mb_cols_left);
1269 skip_coeff &= m[mis].mbmi.mb_skip_coeff; 990 const int ymbs = MIN(nmbs, mb_rows_left);
1270 skip_coeff &= m[mis + 1].mbmi.mb_skip_coeff; 991 int x, y;
1271 } 992
1272 #endif 993 skip_coeff = 1;
1273 vp9_write(bc, skip_coeff, 994 for (y = 0; y < ymbs; y++) {
1274 vp9_get_pred_prob(c, xd, PRED_MBSKIP)); 995 for (x = 0; x < xmbs; x++) {
1275 } 996 skip_coeff = skip_coeff && m[y * mis + x].mbmi.mb_skip_coeff;
1276 997 }
1277 #if CONFIG_SUPERBLOCKS 998 }
1278 if (m->mbmi.encoded_as_sb) { 999
1000 vp9_write(bc, skip_coeff,
1001 vp9_get_pred_prob(c, xd, PRED_MBSKIP));
1002 }
1003
1004 if (m->mbmi.sb_type) {
1279 sb_kfwrite_ymode(bc, ym, 1005 sb_kfwrite_ymode(bc, ym,
1280 c->sb_kf_ymode_prob[c->kf_ymode_probs_index]); 1006 c->sb_kf_ymode_prob[c->kf_ymode_probs_index]);
1281 } else 1007 } else {
1282 #endif
1283 {
1284 kfwrite_ymode(bc, ym, 1008 kfwrite_ymode(bc, ym,
1285 c->kf_ymode_prob[c->kf_ymode_probs_index]); 1009 c->kf_ymode_prob[c->kf_ymode_probs_index]);
1286 } 1010 }
1287 1011
1288 if (ym == B_PRED) { 1012 if (ym == B_PRED) {
1289 const int mis = c->mode_info_stride;
1290 int i = 0; 1013 int i = 0;
1291 #if CONFIG_COMP_INTRA_PRED
1292 int uses_second =
1293 m->bmi[0].as_mode.second !=
1294 (B_PREDICTION_MODE)(B_DC_PRED - 1);
1295 vp9_write(bc, uses_second, DEFAULT_COMP_INTRA_PROB);
1296 #endif
1297 do { 1014 do {
1298 const B_PREDICTION_MODE A = above_block_mode(m, i, mis); 1015 const B_PREDICTION_MODE A = above_block_mode(m, i, mis);
1299 const B_PREDICTION_MODE L = left_block_mode(m, i); 1016 const B_PREDICTION_MODE L = left_block_mode(m, i);
1300 const int bm = m->bmi[i].as_mode.first; 1017 const int bm = m->bmi[i].as_mode.first;
1301 #if CONFIG_COMP_INTRA_PRED
1302 const int bm2 = m->bmi[i].as_mode.second;
1303 #endif
1304 1018
1305 #ifdef ENTROPY_STATS 1019 #ifdef ENTROPY_STATS
1306 ++intra_mode_stats [A] [L] [bm]; 1020 ++intra_mode_stats [A] [L] [bm];
1307 #endif 1021 #endif
1308 1022
1309 write_kf_bmode(bc, bm, c->kf_bmode_prob[A][L]); 1023 write_kf_bmode(bc, bm, c->kf_bmode_prob[A][L]);
1310 #if CONFIG_COMP_INTRA_PRED
1311 if (uses_second) {
1312 write_kf_bmode(bc, bm2, c->kf_bmode_prob[A][L]);
1313 }
1314 #endif
1315 } while (++i < 16); 1024 } while (++i < 16);
1316 } 1025 }
1317 if (ym == I8X8_PRED) { 1026 if (ym == I8X8_PRED) {
1318 write_i8x8_mode(bc, m->bmi[0].as_mode.first, 1027 write_i8x8_mode(bc, m->bmi[0].as_mode.first,
1319 c->fc.i8x8_mode_prob); 1028 c->fc.i8x8_mode_prob);
1320 // printf(" mode: %d\n", m->bmi[0].as_mode.first); fflush(stdout); 1029 // printf(" mode: %d\n", m->bmi[0].as_mode.first); fflush(stdout);
1321 write_i8x8_mode(bc, m->bmi[2].as_mode.first, 1030 write_i8x8_mode(bc, m->bmi[2].as_mode.first,
1322 c->fc.i8x8_mode_prob); 1031 c->fc.i8x8_mode_prob);
1323 // printf(" mode: %d\n", m->bmi[2].as_mode.first); fflush(stdout); 1032 // printf(" mode: %d\n", m->bmi[2].as_mode.first); fflush(stdout);
1324 write_i8x8_mode(bc, m->bmi[8].as_mode.first, 1033 write_i8x8_mode(bc, m->bmi[8].as_mode.first,
1325 c->fc.i8x8_mode_prob); 1034 c->fc.i8x8_mode_prob);
1326 // printf(" mode: %d\n", m->bmi[8].as_mode.first); fflush(stdout); 1035 // printf(" mode: %d\n", m->bmi[8].as_mode.first); fflush(stdout);
1327 write_i8x8_mode(bc, m->bmi[10].as_mode.first, 1036 write_i8x8_mode(bc, m->bmi[10].as_mode.first,
1328 c->fc.i8x8_mode_prob); 1037 c->fc.i8x8_mode_prob);
1329 // printf(" mode: %d\n", m->bmi[10].as_mode.first); fflush(stdout); 1038 // printf(" mode: %d\n", m->bmi[10].as_mode.first); fflush(stdout);
1330 } else 1039 } else
1331 write_uv_mode(bc, m->mbmi.uv_mode, c->kf_uv_mode_prob[ym]); 1040 write_uv_mode(bc, m->mbmi.uv_mode, c->kf_uv_mode_prob[ym]);
1332 1041
1333 if (ym <= I8X8_PRED && c->txfm_mode == TX_MODE_SELECT && 1042 if (ym <= I8X8_PRED && c->txfm_mode == TX_MODE_SELECT &&
1334 !((c->mb_no_coeff_skip && m->mbmi.mb_skip_coeff) || 1043 !((c->mb_no_coeff_skip && skip_coeff) ||
1335 (vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) && 1044 (vp9_segfeature_active(xd, segment_id, SEG_LVL_EOB) &&
1336 vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) == 0))) { 1045 vp9_get_segdata(xd, segment_id, SEG_LVL_EOB) == 0))) {
1337 TX_SIZE sz = m->mbmi.txfm_size; 1046 TX_SIZE sz = m->mbmi.txfm_size;
1338 // FIXME(rbultje) code ternary symbol once all experiments are merged 1047 // FIXME(rbultje) code ternary symbol once all experiments are merged
1339 vp9_write(bc, sz != TX_4X4, c->prob_tx[0]); 1048 vp9_write(bc, sz != TX_4X4, c->prob_tx[0]);
1340 if (sz != TX_4X4 && ym <= TM_PRED) 1049 if (sz != TX_4X4 && ym <= TM_PRED) {
1341 vp9_write(bc, sz != TX_8X8, c->prob_tx[1]); 1050 vp9_write(bc, sz != TX_8X8, c->prob_tx[1]);
1051 if (m->mbmi.sb_type && sz != TX_8X8)
1052 vp9_write(bc, sz != TX_16X16, c->prob_tx[2]);
1053 }
1342 } 1054 }
1343 } 1055 }
1344 1056
1345 static void write_kfmodes(VP9_COMP* const cpi, vp9_writer* const bc) { 1057 static void write_modes_b(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc,
1058 TOKENEXTRA **tok, TOKENEXTRA *tok_end,
1059 int mb_row, int mb_col) {
1060 VP9_COMMON *const c = &cpi->common;
1061 MACROBLOCKD *const xd = &cpi->mb.e_mbd;
1062
1063 xd->mode_info_context = m;
1064 if (c->frame_type == KEY_FRAME) {
1065 write_mb_modes_kf(cpi, m, bc,
1066 c->mb_rows - mb_row, c->mb_cols - mb_col);
1067 #ifdef ENTROPY_STATS
1068 active_section = 8;
1069 #endif
1070 } else {
1071 pack_inter_mode_mvs(cpi, m, bc,
1072 c->mb_rows - mb_row, c->mb_cols - mb_col);
1073 #ifdef ENTROPY_STATS
1074 active_section = 1;
1075 #endif
1076 }
1077
1078 assert(*tok < tok_end);
1079 pack_mb_tokens(bc, tok, tok_end);
1080 }
1081
1082 static void write_modes(VP9_COMP *cpi, vp9_writer* const bc) {
1346 VP9_COMMON *const c = &cpi->common; 1083 VP9_COMMON *const c = &cpi->common;
1347 const int mis = c->mode_info_stride; 1084 const int mis = c->mode_info_stride;
1348 MACROBLOCKD *xd = &cpi->mb.e_mbd; 1085 MODE_INFO *m, *m_ptr = c->mi;
1349 MODE_INFO *m; 1086 int i, mb_row, mb_col;
1350 int i;
1351 int row, col;
1352 int mb_row, mb_col;
1353 int row_delta[4] = { 0, +1, 0, -1};
1354 int col_delta[4] = { +1, -1, +1, +1};
1355 TOKENEXTRA *tok = cpi->tok; 1087 TOKENEXTRA *tok = cpi->tok;
1356 TOKENEXTRA *tok_end = tok + cpi->tok_count; 1088 TOKENEXTRA *tok_end = tok + cpi->tok_count;
1357 1089
1358 mb_row = 0; 1090 for (mb_row = 0; mb_row < c->mb_rows; mb_row += 4, m_ptr += 4 * mis) {
1359 for (row = 0; row < c->mb_rows; row += 2) { 1091 m = m_ptr;
1360 m = c->mi + row * mis; 1092 for (mb_col = 0; mb_col < c->mb_cols; mb_col += 4, m += 4) {
1093 vp9_write(bc, m->mbmi.sb_type == BLOCK_SIZE_SB64X64, c->sb64_coded);
1094 if (m->mbmi.sb_type == BLOCK_SIZE_SB64X64) {
1095 write_modes_b(cpi, m, bc, &tok, tok_end, mb_row, mb_col);
1096 } else {
1097 int j;
1361 1098
1362 mb_col = 0; 1099 for (j = 0; j < 4; j++) {
1363 for (col = 0; col < c->mb_cols; col += 2) { 1100 const int x_idx_sb = (j & 1) << 1, y_idx_sb = j & 2;
1364 #if CONFIG_SUPERBLOCKS 1101 MODE_INFO *sb_m = m + y_idx_sb * mis + x_idx_sb;
1365 vp9_write(bc, m->mbmi.encoded_as_sb, c->sb_coded);
1366 #endif
1367 // Process the 4 MBs in the order:
1368 // top-left, top-right, bottom-left, bottom-right
1369 for (i = 0; i < 4; i++) {
1370 int dy = row_delta[i];
1371 int dx = col_delta[i];
1372 int offset_extended = dy * mis + dx;
1373 1102
1374 if ((mb_row >= c->mb_rows) || (mb_col >= c->mb_cols)) { 1103 if (mb_col + x_idx_sb >= c->mb_cols ||
1375 // MB lies outside frame, move on 1104 mb_row + y_idx_sb >= c->mb_rows)
1376 mb_row += dy; 1105 continue;
1377 mb_col += dx; 1106
1378 m += offset_extended; 1107 vp9_write(bc, sb_m->mbmi.sb_type, c->sb32_coded);
1379 continue; 1108 if (sb_m->mbmi.sb_type) {
1109 assert(sb_m->mbmi.sb_type == BLOCK_SIZE_SB32X32);
1110 write_modes_b(cpi, sb_m, bc, &tok, tok_end,
1111 mb_row + y_idx_sb, mb_col + x_idx_sb);
1112 } else {
1113 // Process the 4 MBs in the order:
1114 // top-left, top-right, bottom-left, bottom-right
1115 for (i = 0; i < 4; i++) {
1116 const int x_idx = x_idx_sb + (i & 1), y_idx = y_idx_sb + (i >> 1);
1117 MODE_INFO *mb_m = m + x_idx + y_idx * mis;
1118
1119 if (mb_row + y_idx >= c->mb_rows ||
1120 mb_col + x_idx >= c->mb_cols) {
1121 // MB lies outside frame, move on
1122 continue;
1123 }
1124
1125 assert(mb_m->mbmi.sb_type == BLOCK_SIZE_MB16X16);
1126 write_modes_b(cpi, mb_m, bc, &tok, tok_end,
1127 mb_row + y_idx, mb_col + x_idx);
1128 }
1129 }
1380 } 1130 }
1381
1382 // Make sure the MacroBlockD mode info pointer is set correctly
1383 xd->mode_info_context = m;
1384
1385 write_mb_modes_kf(c, xd, m, mis, bc);
1386 #ifdef ENTROPY_STATS
1387 active_section = 8;
1388 #endif
1389 assert(tok < tok_end);
1390 pack_mb_tokens(bc, &tok, tok_end);
1391
1392 #if CONFIG_SUPERBLOCKS
1393 if (m->mbmi.encoded_as_sb) {
1394 assert(!i);
1395 mb_col += 2;
1396 m += 2;
1397 break;
1398 }
1399 #endif
1400 // Next MB
1401 mb_row += dy;
1402 mb_col += dx;
1403 m += offset_extended;
1404 } 1131 }
1405 } 1132 }
1406 mb_row += 2;
1407 } 1133 }
1408 } 1134 }
1409 1135
1410 1136
1411 /* This function is used for debugging probability trees. */ 1137 /* This function is used for debugging probability trees. */
1412 static void print_prob_tree(vp9_prob 1138 static void print_prob_tree(vp9_coeff_probs *coef_probs) {
1413 coef_probs[BLOCK_TYPES][COEF_BANDS][PREV_COEF_CONTEX TS][ENTROPY_NODES]) {
1414 /* print coef probability tree */ 1139 /* print coef probability tree */
1415 int i, j, k, l; 1140 int i, j, k, l;
1416 FILE *f = fopen("enc_tree_probs.txt", "a"); 1141 FILE *f = fopen("enc_tree_probs.txt", "a");
1417 fprintf(f, "{\n"); 1142 fprintf(f, "{\n");
1418 for (i = 0; i < BLOCK_TYPES; i++) { 1143 for (i = 0; i < BLOCK_TYPES_4X4; i++) {
1419 fprintf(f, " {\n"); 1144 fprintf(f, " {\n");
1420 for (j = 0; j < COEF_BANDS; j++) { 1145 for (j = 0; j < COEF_BANDS; j++) {
1421 fprintf(f, " {\n"); 1146 fprintf(f, " {\n");
1422 for (k = 0; k < PREV_COEF_CONTEXTS; k++) { 1147 for (k = 0; k < PREV_COEF_CONTEXTS; k++) {
1423 fprintf(f, " {"); 1148 fprintf(f, " {");
1424 for (l = 0; l < ENTROPY_NODES; l++) { 1149 for (l = 0; l < ENTROPY_NODES; l++) {
1425 fprintf(f, "%3u, ", 1150 fprintf(f, "%3u, ",
1426 (unsigned int)(coef_probs [i][j][k][l])); 1151 (unsigned int)(coef_probs [i][j][k][l]));
1427 } 1152 }
1428 fprintf(f, " }\n"); 1153 fprintf(f, " }\n");
1429 } 1154 }
1430 fprintf(f, " }\n"); 1155 fprintf(f, " }\n");
1431 } 1156 }
1432 fprintf(f, " }\n"); 1157 fprintf(f, " }\n");
1433 } 1158 }
1434 fprintf(f, "}\n"); 1159 fprintf(f, "}\n");
1435 fclose(f); 1160 fclose(f);
1436 } 1161 }
1437 1162
1438 static void build_coeff_contexts(VP9_COMP *cpi) { 1163 static void build_tree_distribution(vp9_coeff_probs *coef_probs,
1164 vp9_coeff_count *coef_counts,
1165 #ifdef ENTROPY_STATS
1166 VP9_COMP *cpi,
1167 vp9_coeff_accum *context_counters,
1168 #endif
1169 vp9_coeff_stats *coef_branch_ct,
1170 int block_types) {
1439 int i = 0, j, k; 1171 int i = 0, j, k;
1440 #ifdef ENTROPY_STATS 1172 #ifdef ENTROPY_STATS
1441 int t = 0; 1173 int t = 0;
1442 #endif 1174 #endif
1443 for (i = 0; i < BLOCK_TYPES; ++i) { 1175
1176 for (i = 0; i < block_types; ++i) {
1444 for (j = 0; j < COEF_BANDS; ++j) { 1177 for (j = 0; j < COEF_BANDS; ++j) {
1445 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) { 1178 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
1446 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0))) 1179 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
1447 continue; 1180 continue;
1448 vp9_tree_probs_from_distribution( 1181 vp9_tree_probs_from_distribution(MAX_ENTROPY_TOKENS,
1449 MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree, 1182 vp9_coef_encodings, vp9_coef_tree,
1450 cpi->frame_coef_probs [i][j][k], 1183 coef_probs[i][j][k],
1451 cpi->frame_branch_ct [i][j][k], 1184 coef_branch_ct[i][j][k],
1452 cpi->coef_counts [i][j][k], 1185 coef_counts[i][j][k]);
1453 256, 1
1454 );
1455 #ifdef ENTROPY_STATS 1186 #ifdef ENTROPY_STATS
1456 if (!cpi->dummy_packing) 1187 if (!cpi->dummy_packing)
1457 for (t = 0; t < MAX_ENTROPY_TOKENS; ++t) 1188 for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
1458 context_counters[i][j][k][t] += cpi->coef_counts[i][j][k][t]; 1189 context_counters[i][j][k][t] += coef_counts[i][j][k][t];
1459 #endif
1460 }
1461 }
1462 }
1463 for (i = 0; i < BLOCK_TYPES; ++i) {
1464 for (j = 0; j < COEF_BANDS; ++j) {
1465 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
1466 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
1467 continue;
1468 vp9_tree_probs_from_distribution(
1469 MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
1470 cpi->frame_hybrid_coef_probs [i][j][k],
1471 cpi->frame_hybrid_branch_ct [i][j][k],
1472 cpi->hybrid_coef_counts [i][j][k],
1473 256, 1
1474 );
1475 #ifdef ENTROPY_STATS
1476 if (!cpi->dummy_packing)
1477 for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
1478 hybrid_context_counters[i][j][k][t] += cpi->hybrid_coef_counts[i][j] [k][t];
1479 #endif
1480 }
1481 }
1482 }
1483
1484 if (cpi->common.txfm_mode != ONLY_4X4) {
1485 for (i = 0; i < BLOCK_TYPES_8X8; ++i) {
1486 for (j = 0; j < COEF_BANDS; ++j) {
1487 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
1488 /* at every context */
1489 /* calc probs and branch cts for this frame only */
1490 // vp9_prob new_p [ENTROPY_NODES];
1491 // unsigned int branch_ct [ENTROPY_NODES] [2];
1492 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
1493 continue;
1494 vp9_tree_probs_from_distribution(
1495 MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
1496 cpi->frame_coef_probs_8x8 [i][j][k],
1497 cpi->frame_branch_ct_8x8 [i][j][k],
1498 cpi->coef_counts_8x8 [i][j][k],
1499 256, 1
1500 );
1501 #ifdef ENTROPY_STATS
1502 if (!cpi->dummy_packing)
1503 for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
1504 context_counters_8x8[i][j][k][t] += cpi->coef_counts_8x8[i][j][k][ t];
1505 #endif
1506 }
1507 }
1508 }
1509 for (i = 0; i < BLOCK_TYPES_8X8; ++i) {
1510 for (j = 0; j < COEF_BANDS; ++j) {
1511 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
1512 /* at every context */
1513 /* calc probs and branch cts for this frame only */
1514 // vp9_prob new_p [ENTROPY_NODES];
1515 // unsigned int branch_ct [ENTROPY_NODES] [2];
1516 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
1517 continue;
1518 vp9_tree_probs_from_distribution(
1519 MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
1520 cpi->frame_hybrid_coef_probs_8x8 [i][j][k],
1521 cpi->frame_hybrid_branch_ct_8x8 [i][j][k],
1522 cpi->hybrid_coef_counts_8x8 [i][j][k],
1523 256, 1
1524 );
1525 #ifdef ENTROPY_STATS
1526 if (!cpi->dummy_packing)
1527 for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
1528 hybrid_context_counters_8x8[i][j][k][t] += cpi->hybrid_coef_counts _8x8[i][j][k][t];
1529 #endif
1530 }
1531 }
1532 }
1533 }
1534
1535 if (cpi->common.txfm_mode > ALLOW_8X8) {
1536 for (i = 0; i < BLOCK_TYPES_16X16; ++i) {
1537 for (j = 0; j < COEF_BANDS; ++j) {
1538 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
1539 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
1540 continue;
1541 vp9_tree_probs_from_distribution(
1542 MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
1543 cpi->frame_coef_probs_16x16[i][j][k],
1544 cpi->frame_branch_ct_16x16[i][j][k],
1545 cpi->coef_counts_16x16[i][j][k], 256, 1);
1546 #ifdef ENTROPY_STATS
1547 if (!cpi->dummy_packing)
1548 for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
1549 context_counters_16x16[i][j][k][t] += cpi->coef_counts_16x16[i][j] [k][t];
1550 #endif
1551 }
1552 }
1553 }
1554 }
1555 for (i = 0; i < BLOCK_TYPES_16X16; ++i) {
1556 for (j = 0; j < COEF_BANDS; ++j) {
1557 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
1558 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
1559 continue;
1560 vp9_tree_probs_from_distribution(
1561 MAX_ENTROPY_TOKENS, vp9_coef_encodings, vp9_coef_tree,
1562 cpi->frame_hybrid_coef_probs_16x16[i][j][k],
1563 cpi->frame_hybrid_branch_ct_16x16[i][j][k],
1564 cpi->hybrid_coef_counts_16x16[i][j][k], 256, 1);
1565 #ifdef ENTROPY_STATS
1566 if (!cpi->dummy_packing)
1567 for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
1568 hybrid_context_counters_16x16[i][j][k][t] += cpi->hybrid_coef_counts _16x16[i][j][k][t];
1569 #endif 1190 #endif
1570 } 1191 }
1571 } 1192 }
1572 } 1193 }
1573 } 1194 }
1574 1195
1575 static void update_coef_probs_common( 1196 static void build_coeff_contexts(VP9_COMP *cpi) {
1576 vp9_writer* const bc, 1197 build_tree_distribution(cpi->frame_coef_probs_4x4,
1577 vp9_prob new_frame_coef_probs[BLOCK_TYPES][COEF_BANDS] 1198 cpi->coef_counts_4x4,
1578 [PREV_COEF_CONTEXTS][ENTROPY_NODES], 1199 #ifdef ENTROPY_STATS
1579 vp9_prob old_frame_coef_probs[BLOCK_TYPES][COEF_BANDS] 1200 cpi, context_counters_4x4,
1580 [PREV_COEF_CONTEXTS][ENTROPY_NODES], 1201 #endif
1581 unsigned int frame_branch_ct[BLOCK_TYPES][COEF_BANDS] 1202 cpi->frame_branch_ct_4x4, BLOCK_TYPES_4X4);
1582 [PREV_COEF_CONTEXTS][ENTROPY_NODES][2]) { 1203 build_tree_distribution(cpi->frame_hybrid_coef_probs_4x4,
1204 cpi->hybrid_coef_counts_4x4,
1205 #ifdef ENTROPY_STATS
1206 cpi, hybrid_context_counters_4x4,
1207 #endif
1208 cpi->frame_hybrid_branch_ct_4x4, BLOCK_TYPES_4X4);
1209 build_tree_distribution(cpi->frame_coef_probs_8x8,
1210 cpi->coef_counts_8x8,
1211 #ifdef ENTROPY_STATS
1212 cpi, context_counters_8x8,
1213 #endif
1214 cpi->frame_branch_ct_8x8, BLOCK_TYPES_8X8);
1215 build_tree_distribution(cpi->frame_hybrid_coef_probs_8x8,
1216 cpi->hybrid_coef_counts_8x8,
1217 #ifdef ENTROPY_STATS
1218 cpi, hybrid_context_counters_8x8,
1219 #endif
1220 cpi->frame_hybrid_branch_ct_8x8, BLOCK_TYPES_8X8);
1221 build_tree_distribution(cpi->frame_coef_probs_16x16,
1222 cpi->coef_counts_16x16,
1223 #ifdef ENTROPY_STATS
1224 cpi, context_counters_16x16,
1225 #endif
1226 cpi->frame_branch_ct_16x16, BLOCK_TYPES_16X16);
1227 build_tree_distribution(cpi->frame_hybrid_coef_probs_16x16,
1228 cpi->hybrid_coef_counts_16x16,
1229 #ifdef ENTROPY_STATS
1230 cpi, hybrid_context_counters_16x16,
1231 #endif
1232 cpi->frame_hybrid_branch_ct_16x16, BLOCK_TYPES_16X16);
1233 build_tree_distribution(cpi->frame_coef_probs_32x32,
1234 cpi->coef_counts_32x32,
1235 #ifdef ENTROPY_STATS
1236 cpi, context_counters_32x32,
1237 #endif
1238 cpi->frame_branch_ct_32x32, BLOCK_TYPES_32X32);
1239 }
1240
1241 static void update_coef_probs_common(vp9_writer* const bc,
1242 #ifdef ENTROPY_STATS
1243 VP9_COMP *cpi,
1244 vp9_coeff_stats *tree_update_hist,
1245 #endif
1246 vp9_coeff_probs *new_frame_coef_probs,
1247 vp9_coeff_probs *old_frame_coef_probs,
1248 vp9_coeff_stats *frame_branch_ct,
1249 int block_types) {
1583 int i, j, k, t; 1250 int i, j, k, t;
1584 int update[2] = {0, 0}; 1251 int update[2] = {0, 0};
1585 int savings; 1252 int savings;
1586 // vp9_prob bestupd = find_coef_update_prob(cpi); 1253 // vp9_prob bestupd = find_coef_update_prob(cpi);
1587 1254
1588 /* dry run to see if there is any udpate at all needed */ 1255 /* dry run to see if there is any udpate at all needed */
1589 savings = 0; 1256 savings = 0;
1590 for (i = 0; i < BLOCK_TYPES; ++i) { 1257 for (i = 0; i < block_types; ++i) {
1591 for (j = !i; j < COEF_BANDS; ++j) { 1258 for (j = !i; j < COEF_BANDS; ++j) {
1592 int prev_coef_savings[ENTROPY_NODES] = {0}; 1259 int prev_coef_savings[ENTROPY_NODES] = {0};
1593 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) { 1260 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
1594 for (t = 0; t < ENTROPY_NODES; ++t) { 1261 for (t = 0; t < ENTROPY_NODES; ++t) {
1595 vp9_prob newp = new_frame_coef_probs[i][j][k][t]; 1262 vp9_prob newp = new_frame_coef_probs[i][j][k][t];
1596 const vp9_prob oldp = old_frame_coef_probs[i][j][k][t]; 1263 const vp9_prob oldp = old_frame_coef_probs[i][j][k][t];
1597 const vp9_prob upd = COEF_UPDATE_PROB; 1264 const vp9_prob upd = COEF_UPDATE_PROB;
1598 int s = prev_coef_savings[t]; 1265 int s = prev_coef_savings[t];
1599 int u = 0; 1266 int u = 0;
1600 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0))) 1267 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
(...skipping 23 matching lines...) Expand all
1624 } 1291 }
1625 } 1292 }
1626 } 1293 }
1627 1294
1628 // printf("Update %d %d, savings %d\n", update[0], update[1], savings); 1295 // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
1629 /* Is coef updated at all */ 1296 /* Is coef updated at all */
1630 if (update[1] == 0 || savings < 0) { 1297 if (update[1] == 0 || savings < 0) {
1631 vp9_write_bit(bc, 0); 1298 vp9_write_bit(bc, 0);
1632 } else { 1299 } else {
1633 vp9_write_bit(bc, 1); 1300 vp9_write_bit(bc, 1);
1634 for (i = 0; i < BLOCK_TYPES; ++i) { 1301 for (i = 0; i < block_types; ++i) {
1635 for (j = !i; j < COEF_BANDS; ++j) { 1302 for (j = !i; j < COEF_BANDS; ++j) {
1636 int prev_coef_savings[ENTROPY_NODES] = {0}; 1303 int prev_coef_savings[ENTROPY_NODES] = {0};
1637 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) { 1304 for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
1638 // calc probs and branch cts for this frame only 1305 // calc probs and branch cts for this frame only
1639 for (t = 0; t < ENTROPY_NODES; ++t) { 1306 for (t = 0; t < ENTROPY_NODES; ++t) {
1640 vp9_prob newp = new_frame_coef_probs[i][j][k][t]; 1307 vp9_prob newp = new_frame_coef_probs[i][j][k][t];
1641 vp9_prob *oldp = old_frame_coef_probs[i][j][k] + t; 1308 vp9_prob *oldp = old_frame_coef_probs[i][j][k] + t;
1642 const vp9_prob upd = COEF_UPDATE_PROB; 1309 const vp9_prob upd = COEF_UPDATE_PROB;
1643 int s = prev_coef_savings[t]; 1310 int s = prev_coef_savings[t];
1644 int u = 0; 1311 int u = 0;
1645 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0))) 1312 if (k >= 3 && ((i == 0 && j == 1) || (i > 0 && j == 0)))
1646 continue; 1313 continue;
1647 1314
1648 #if defined(SEARCH_NEWP) 1315 #if defined(SEARCH_NEWP)
1649 s = prob_diff_update_savings_search( 1316 s = prob_diff_update_savings_search(
1650 frame_branch_ct[i][j][k][t], 1317 frame_branch_ct[i][j][k][t],
1651 *oldp, &newp, upd); 1318 *oldp, &newp, upd);
1652 if (s > 0 && newp != *oldp) 1319 if (s > 0 && newp != *oldp)
1653 u = 1; 1320 u = 1;
1654 #else 1321 #else
1655 s = prob_update_savings( 1322 s = prob_update_savings(
1656 frame_branch_ct[i][j][k][t], 1323 frame_branch_ct[i][j][k][t],
1657 *oldp, newp, upd); 1324 *oldp, newp, upd);
1658 if (s > 0) 1325 if (s > 0)
1659 u = 1; 1326 u = 1;
1660 #endif 1327 #endif
1661 vp9_write(bc, u, upd); 1328 vp9_write(bc, u, upd);
1662 #ifdef ENTROPY_STATS 1329 #ifdef ENTROPY_STATS
1663 if (!cpi->dummy_packing) 1330 if (!cpi->dummy_packing)
1664 ++ tree_update_hist [i][j][k][t] [u]; 1331 ++tree_update_hist[i][j][k][t][u];
1665 #endif 1332 #endif
1666 if (u) { 1333 if (u) {
1667 /* send/use new probability */ 1334 /* send/use new probability */
1668 write_prob_diff_update(bc, newp, *oldp); 1335 write_prob_diff_update(bc, newp, *oldp);
1669 *oldp = newp; 1336 *oldp = newp;
1670 } 1337 }
1671 } 1338 }
1672 } 1339 }
1673 } 1340 }
1674 } 1341 }
1675 } 1342 }
1676 } 1343 }
1677 1344
1678 static void update_coef_probs(VP9_COMP* const cpi, vp9_writer* const bc) { 1345 static void update_coef_probs(VP9_COMP* const cpi, vp9_writer* const bc) {
1679 vp9_clear_system_state(); 1346 vp9_clear_system_state();
1680 1347
1681 // Build the cofficient contexts based on counts collected in encode loop 1348 // Build the cofficient contexts based on counts collected in encode loop
1682 build_coeff_contexts(cpi); 1349 build_coeff_contexts(cpi);
1683 1350
1684 update_coef_probs_common(bc, 1351 update_coef_probs_common(bc,
1685 cpi->frame_coef_probs, 1352 #ifdef ENTROPY_STATS
1686 cpi->common.fc.coef_probs, 1353 cpi,
1687 cpi->frame_branch_ct); 1354 tree_update_hist_4x4,
1355 #endif
1356 cpi->frame_coef_probs_4x4,
1357 cpi->common.fc.coef_probs_4x4,
1358 cpi->frame_branch_ct_4x4,
1359 BLOCK_TYPES_4X4);
1688 1360
1689 update_coef_probs_common(bc, 1361 update_coef_probs_common(bc,
1690 cpi->frame_hybrid_coef_probs, 1362 #ifdef ENTROPY_STATS
1691 cpi->common.fc.hybrid_coef_probs, 1363 cpi,
1692 cpi->frame_hybrid_branch_ct); 1364 hybrid_tree_update_hist_4x4,
1365 #endif
1366 cpi->frame_hybrid_coef_probs_4x4,
1367 cpi->common.fc.hybrid_coef_probs_4x4,
1368 cpi->frame_hybrid_branch_ct_4x4,
1369 BLOCK_TYPES_4X4);
1693 1370
1694 /* do not do this if not even allowed */ 1371 /* do not do this if not even allowed */
1695 if (cpi->common.txfm_mode != ONLY_4X4) { 1372 if (cpi->common.txfm_mode != ONLY_4X4) {
1696 update_coef_probs_common(bc, 1373 update_coef_probs_common(bc,
1374 #ifdef ENTROPY_STATS
1375 cpi,
1376 tree_update_hist_8x8,
1377 #endif
1697 cpi->frame_coef_probs_8x8, 1378 cpi->frame_coef_probs_8x8,
1698 cpi->common.fc.coef_probs_8x8, 1379 cpi->common.fc.coef_probs_8x8,
1699 cpi->frame_branch_ct_8x8); 1380 cpi->frame_branch_ct_8x8,
1381 BLOCK_TYPES_8X8);
1700 1382
1701 update_coef_probs_common(bc, 1383 update_coef_probs_common(bc,
1384 #ifdef ENTROPY_STATS
1385 cpi,
1386 hybrid_tree_update_hist_8x8,
1387 #endif
1702 cpi->frame_hybrid_coef_probs_8x8, 1388 cpi->frame_hybrid_coef_probs_8x8,
1703 cpi->common.fc.hybrid_coef_probs_8x8, 1389 cpi->common.fc.hybrid_coef_probs_8x8,
1704 cpi->frame_hybrid_branch_ct_8x8); 1390 cpi->frame_hybrid_branch_ct_8x8,
1391 BLOCK_TYPES_8X8);
1705 } 1392 }
1706 1393
1707 if (cpi->common.txfm_mode > ALLOW_8X8) { 1394 if (cpi->common.txfm_mode > ALLOW_8X8) {
1708 update_coef_probs_common(bc, 1395 update_coef_probs_common(bc,
1396 #ifdef ENTROPY_STATS
1397 cpi,
1398 tree_update_hist_16x16,
1399 #endif
1709 cpi->frame_coef_probs_16x16, 1400 cpi->frame_coef_probs_16x16,
1710 cpi->common.fc.coef_probs_16x16, 1401 cpi->common.fc.coef_probs_16x16,
1711 cpi->frame_branch_ct_16x16); 1402 cpi->frame_branch_ct_16x16,
1403 BLOCK_TYPES_16X16);
1712 update_coef_probs_common(bc, 1404 update_coef_probs_common(bc,
1405 #ifdef ENTROPY_STATS
1406 cpi,
1407 hybrid_tree_update_hist_16x16,
1408 #endif
1713 cpi->frame_hybrid_coef_probs_16x16, 1409 cpi->frame_hybrid_coef_probs_16x16,
1714 cpi->common.fc.hybrid_coef_probs_16x16, 1410 cpi->common.fc.hybrid_coef_probs_16x16,
1715 cpi->frame_hybrid_branch_ct_16x16); 1411 cpi->frame_hybrid_branch_ct_16x16,
1412 BLOCK_TYPES_16X16);
1413 }
1414
1415 if (cpi->common.txfm_mode > ALLOW_16X16) {
1416 update_coef_probs_common(bc,
1417 #ifdef ENTROPY_STATS
1418 cpi,
1419 tree_update_hist_32x32,
1420 #endif
1421 cpi->frame_coef_probs_32x32,
1422 cpi->common.fc.coef_probs_32x32,
1423 cpi->frame_branch_ct_32x32,
1424 BLOCK_TYPES_32X32);
1716 } 1425 }
1717 } 1426 }
1718 1427
1719 #ifdef PACKET_TESTING 1428 #ifdef PACKET_TESTING
1720 FILE *vpxlogc = 0; 1429 FILE *vpxlogc = 0;
1721 #endif 1430 #endif
1722 1431
1723 static void put_delta_q(vp9_writer *bc, int delta_q) { 1432 static void put_delta_q(vp9_writer *bc, int delta_q) {
1724 if (delta_q != 0) { 1433 if (delta_q != 0) {
1725 vp9_write_bit(bc, 1); 1434 vp9_write_bit(bc, 1);
(...skipping 14 matching lines...) Expand all
1740 int bestcost = INT_MAX; 1449 int bestcost = INT_MAX;
1741 int bestindex = 0; 1450 int bestindex = 0;
1742 int i, j; 1451 int i, j;
1743 1452
1744 for (i = 0; i < 8; i++) { 1453 for (i = 0; i < 8; i++) {
1745 vp9_cost_tokens(mode_cost, cpi->common.kf_ymode_prob[i], vp9_kf_ymode_tree); 1454 vp9_cost_tokens(mode_cost, cpi->common.kf_ymode_prob[i], vp9_kf_ymode_tree);
1746 cost = 0; 1455 cost = 0;
1747 for (j = 0; j < VP9_YMODES; j++) { 1456 for (j = 0; j < VP9_YMODES; j++) {
1748 cost += mode_cost[j] * cpi->ymode_count[j]; 1457 cost += mode_cost[j] * cpi->ymode_count[j];
1749 } 1458 }
1750 #if CONFIG_SUPERBLOCKS
1751 vp9_cost_tokens(mode_cost, cpi->common.sb_kf_ymode_prob[i], 1459 vp9_cost_tokens(mode_cost, cpi->common.sb_kf_ymode_prob[i],
1752 vp9_sb_ymode_tree); 1460 vp9_sb_ymode_tree);
1753 for (j = 0; j < VP9_I32X32_MODES; j++) { 1461 for (j = 0; j < VP9_I32X32_MODES; j++) {
1754 cost += mode_cost[j] * cpi->sb_ymode_count[j]; 1462 cost += mode_cost[j] * cpi->sb_ymode_count[j];
1755 } 1463 }
1756 #endif
1757 if (cost < bestcost) { 1464 if (cost < bestcost) {
1758 bestindex = i; 1465 bestindex = i;
1759 bestcost = cost; 1466 bestcost = cost;
1760 } 1467 }
1761 } 1468 }
1762 cpi->common.kf_ymode_probs_index = bestindex; 1469 cpi->common.kf_ymode_probs_index = bestindex;
1763 1470
1764 } 1471 }
1765 static void segment_reference_frames(VP9_COMP *cpi) { 1472 static void segment_reference_frames(VP9_COMP *cpi) {
1766 VP9_COMMON *oci = &cpi->common; 1473 VP9_COMMON *oci = &cpi->common;
(...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after
1936 for (i = 0; i < PREDICTION_PROBS; i++) { 1643 for (i = 0; i < PREDICTION_PROBS; i++) {
1937 if (cpi->ref_pred_probs_update[i]) { 1644 if (cpi->ref_pred_probs_update[i]) {
1938 vp9_write_bit(&header_bc, 1); 1645 vp9_write_bit(&header_bc, 1);
1939 vp9_write_literal(&header_bc, pc->ref_pred_probs[i], 8); 1646 vp9_write_literal(&header_bc, pc->ref_pred_probs[i], 8);
1940 } else { 1647 } else {
1941 vp9_write_bit(&header_bc, 0); 1648 vp9_write_bit(&header_bc, 0);
1942 } 1649 }
1943 } 1650 }
1944 } 1651 }
1945 1652
1946 #if CONFIG_SUPERBLOCKS 1653 pc->sb64_coded = get_binary_prob(cpi->sb64_count[0], cpi->sb64_count[1]);
1947 { 1654 vp9_write_literal(&header_bc, pc->sb64_coded, 8);
1948 /* sb mode probability */ 1655 pc->sb32_coded = get_binary_prob(cpi->sb32_count[0], cpi->sb32_count[1]);
1949 const int sb_max = (((pc->mb_rows + 1) >> 1) * ((pc->mb_cols + 1) >> 1)); 1656 vp9_write_literal(&header_bc, pc->sb32_coded, 8);
1950
1951 pc->sb_coded = get_prob(sb_max - cpi->sb_count, sb_max);
1952 vp9_write_literal(&header_bc, pc->sb_coded, 8);
1953 }
1954 #endif
1955 1657
1956 { 1658 {
1957 if (pc->txfm_mode == TX_MODE_SELECT) { 1659 if (pc->txfm_mode == TX_MODE_SELECT) {
1958 pc->prob_tx[0] = get_prob(cpi->txfm_count[0] + cpi->txfm_count_8x8p[0], 1660 pc->prob_tx[0] = get_prob(cpi->txfm_count_32x32p[TX_4X4] +
1959 cpi->txfm_count[0] + cpi->txfm_count[1] + cpi->t xfm_count[2] + 1661 cpi->txfm_count_16x16p[TX_4X4] +
1960 cpi->txfm_count_8x8p[0] + cpi->txfm_count_8x8p[1 ]); 1662 cpi->txfm_count_8x8p[TX_4X4],
1961 pc->prob_tx[1] = get_prob(cpi->txfm_count[1], cpi->txfm_count[1] + cpi->tx fm_count[2]); 1663 cpi->txfm_count_32x32p[TX_4X4] +
1664 cpi->txfm_count_32x32p[TX_8X8] +
1665 cpi->txfm_count_32x32p[TX_16X16] +
1666 cpi->txfm_count_32x32p[TX_32X32] +
1667 cpi->txfm_count_16x16p[TX_4X4] +
1668 cpi->txfm_count_16x16p[TX_8X8] +
1669 cpi->txfm_count_16x16p[TX_16X16] +
1670 cpi->txfm_count_8x8p[TX_4X4] +
1671 cpi->txfm_count_8x8p[TX_8X8]);
1672 pc->prob_tx[1] = get_prob(cpi->txfm_count_32x32p[TX_8X8] +
1673 cpi->txfm_count_16x16p[TX_8X8],
1674 cpi->txfm_count_32x32p[TX_8X8] +
1675 cpi->txfm_count_32x32p[TX_16X16] +
1676 cpi->txfm_count_32x32p[TX_32X32] +
1677 cpi->txfm_count_16x16p[TX_8X8] +
1678 cpi->txfm_count_16x16p[TX_16X16]);
1679 pc->prob_tx[2] = get_prob(cpi->txfm_count_32x32p[TX_16X16],
1680 cpi->txfm_count_32x32p[TX_16X16] +
1681 cpi->txfm_count_32x32p[TX_32X32]);
1962 } else { 1682 } else {
1963 pc->prob_tx[0] = 128; 1683 pc->prob_tx[0] = 128;
1964 pc->prob_tx[1] = 128; 1684 pc->prob_tx[1] = 128;
1685 pc->prob_tx[2] = 128;
1965 } 1686 }
1966 vp9_write_literal(&header_bc, pc->txfm_mode, 2); 1687 vp9_write_literal(&header_bc, pc->txfm_mode <= 3 ? pc->txfm_mode : 3, 2);
1688 if (pc->txfm_mode > ALLOW_16X16) {
1689 vp9_write_bit(&header_bc, pc->txfm_mode == TX_MODE_SELECT);
1690 }
1967 if (pc->txfm_mode == TX_MODE_SELECT) { 1691 if (pc->txfm_mode == TX_MODE_SELECT) {
1968 vp9_write_literal(&header_bc, pc->prob_tx[0], 8); 1692 vp9_write_literal(&header_bc, pc->prob_tx[0], 8);
1969 vp9_write_literal(&header_bc, pc->prob_tx[1], 8); 1693 vp9_write_literal(&header_bc, pc->prob_tx[1], 8);
1694 vp9_write_literal(&header_bc, pc->prob_tx[2], 8);
1970 } 1695 }
1971 } 1696 }
1972 1697
1973 // Encode the loop filter level and type 1698 // Encode the loop filter level and type
1974 vp9_write_bit(&header_bc, pc->filter_type); 1699 vp9_write_bit(&header_bc, pc->filter_type);
1975 vp9_write_literal(&header_bc, pc->filter_level, 6); 1700 vp9_write_literal(&header_bc, pc->filter_level, 6);
1976 vp9_write_literal(&header_bc, pc->sharpness_level, 3); 1701 vp9_write_literal(&header_bc, pc->sharpness_level, 3);
1977 1702
1978 // Write out loop filter deltas applied at the MB level based on mode or ref f rame (if they are enabled). 1703 // Write out loop filter deltas applied at the MB level based on mode or ref f rame (if they are enabled).
1979 vp9_write_bit(&header_bc, (xd->mode_ref_lf_delta_enabled) ? 1 : 0); 1704 vp9_write_bit(&header_bc, (xd->mode_ref_lf_delta_enabled) ? 1 : 0);
(...skipping 133 matching lines...) Expand 10 before | Expand all | Expand 10 after
2113 1838
2114 #ifdef ENTROPY_STATS 1839 #ifdef ENTROPY_STATS
2115 if (pc->frame_type == INTER_FRAME) 1840 if (pc->frame_type == INTER_FRAME)
2116 active_section = 0; 1841 active_section = 0;
2117 else 1842 else
2118 active_section = 7; 1843 active_section = 7;
2119 #endif 1844 #endif
2120 1845
2121 // If appropriate update the inter mode probability context and code the 1846 // If appropriate update the inter mode probability context and code the
2122 // changes in the bitstream. 1847 // changes in the bitstream.
2123 if ((pc->frame_type != KEY_FRAME)) { 1848 if (pc->frame_type != KEY_FRAME) {
2124 int i, j; 1849 int i, j;
2125 int new_context[INTER_MODE_CONTEXTS][4]; 1850 int new_context[INTER_MODE_CONTEXTS][4];
2126 update_mode_probs(pc, new_context); 1851 update_mode_probs(pc, new_context);
2127 1852
2128 for (i = 0; i < INTER_MODE_CONTEXTS; i++) { 1853 for (i = 0; i < INTER_MODE_CONTEXTS; i++) {
2129 for (j = 0; j < 4; j++) { 1854 for (j = 0; j < 4; j++) {
2130 if (new_context[i][j] != pc->fc.vp9_mode_contexts[i][j]) { 1855 if (new_context[i][j] != pc->fc.vp9_mode_contexts[i][j]) {
2131 vp9_write(&header_bc, 1, 252); 1856 vp9_write(&header_bc, 1, 252);
2132 vp9_write_literal(&header_bc, new_context[i][j], 8); 1857 vp9_write_literal(&header_bc, new_context[i][j], 8);
2133 1858
2134 // Only update the persistent copy if this is the "real pack" 1859 // Only update the persistent copy if this is the "real pack"
2135 if (!cpi->dummy_packing) { 1860 if (!cpi->dummy_packing) {
2136 pc->fc.vp9_mode_contexts[i][j] = new_context[i][j]; 1861 pc->fc.vp9_mode_contexts[i][j] = new_context[i][j];
2137 } 1862 }
2138 } else { 1863 } else {
2139 vp9_write(&header_bc, 0, 252); 1864 vp9_write(&header_bc, 0, 252);
2140 } 1865 }
2141 } 1866 }
2142 } 1867 }
2143 } 1868 }
2144 1869
1870 #if CONFIG_NEW_MVREF
1871 if ((pc->frame_type != KEY_FRAME)) {
1872 int new_mvref_probs[MAX_REF_FRAMES][MAX_MV_REF_CANDIDATES-1];
1873 int i, j;
1874
1875 update_mv_ref_probs(cpi, new_mvref_probs);
1876
1877 for (i = 0; i < MAX_REF_FRAMES; ++i) {
1878 // Skip the dummy entry for intra ref frame.
1879 if (i == INTRA_FRAME) {
1880 continue;
1881 }
1882
1883 // Encode any mandated updates to probabilities
1884 for (j = 0; j < MAX_MV_REF_CANDIDATES - 1; ++j) {
1885 if (new_mvref_probs[i][j] != xd->mb_mv_ref_probs[i][j]) {
1886 vp9_write(&header_bc, 1, VP9_MVREF_UPDATE_PROB);
1887 vp9_write_literal(&header_bc, new_mvref_probs[i][j], 8);
1888
1889 // Only update the persistent copy if this is the "real pack"
1890 if (!cpi->dummy_packing) {
1891 xd->mb_mv_ref_probs[i][j] = new_mvref_probs[i][j];
1892 }
1893 } else {
1894 vp9_write(&header_bc, 0, VP9_MVREF_UPDATE_PROB);
1895 }
1896 }
1897 }
1898 }
1899 #endif
1900
2145 vp9_clear_system_state(); // __asm emms; 1901 vp9_clear_system_state(); // __asm emms;
2146 1902
2147 vp9_copy(cpi->common.fc.pre_coef_probs, cpi->common.fc.coef_probs); 1903 vp9_copy(cpi->common.fc.pre_coef_probs_4x4,
2148 vp9_copy(cpi->common.fc.pre_hybrid_coef_probs, cpi->common.fc.hybrid_coef_prob s); 1904 cpi->common.fc.coef_probs_4x4);
2149 vp9_copy(cpi->common.fc.pre_coef_probs_8x8, cpi->common.fc.coef_probs_8x8); 1905 vp9_copy(cpi->common.fc.pre_hybrid_coef_probs_4x4,
2150 vp9_copy(cpi->common.fc.pre_hybrid_coef_probs_8x8, cpi->common.fc.hybrid_coef_ probs_8x8); 1906 cpi->common.fc.hybrid_coef_probs_4x4);
2151 vp9_copy(cpi->common.fc.pre_coef_probs_16x16, cpi->common.fc.coef_probs_16x16) ; 1907 vp9_copy(cpi->common.fc.pre_coef_probs_8x8,
2152 vp9_copy(cpi->common.fc.pre_hybrid_coef_probs_16x16, cpi->common.fc.hybrid_coe f_probs_16x16); 1908 cpi->common.fc.coef_probs_8x8);
2153 #if CONFIG_SUPERBLOCKS 1909 vp9_copy(cpi->common.fc.pre_hybrid_coef_probs_8x8,
1910 cpi->common.fc.hybrid_coef_probs_8x8);
1911 vp9_copy(cpi->common.fc.pre_coef_probs_16x16,
1912 cpi->common.fc.coef_probs_16x16);
1913 vp9_copy(cpi->common.fc.pre_hybrid_coef_probs_16x16,
1914 cpi->common.fc.hybrid_coef_probs_16x16);
1915 vp9_copy(cpi->common.fc.pre_coef_probs_32x32,
1916 cpi->common.fc.coef_probs_32x32);
2154 vp9_copy(cpi->common.fc.pre_sb_ymode_prob, cpi->common.fc.sb_ymode_prob); 1917 vp9_copy(cpi->common.fc.pre_sb_ymode_prob, cpi->common.fc.sb_ymode_prob);
2155 #endif
2156 vp9_copy(cpi->common.fc.pre_ymode_prob, cpi->common.fc.ymode_prob); 1918 vp9_copy(cpi->common.fc.pre_ymode_prob, cpi->common.fc.ymode_prob);
2157 vp9_copy(cpi->common.fc.pre_uv_mode_prob, cpi->common.fc.uv_mode_prob); 1919 vp9_copy(cpi->common.fc.pre_uv_mode_prob, cpi->common.fc.uv_mode_prob);
2158 vp9_copy(cpi->common.fc.pre_bmode_prob, cpi->common.fc.bmode_prob); 1920 vp9_copy(cpi->common.fc.pre_bmode_prob, cpi->common.fc.bmode_prob);
2159 vp9_copy(cpi->common.fc.pre_sub_mv_ref_prob, cpi->common.fc.sub_mv_ref_prob); 1921 vp9_copy(cpi->common.fc.pre_sub_mv_ref_prob, cpi->common.fc.sub_mv_ref_prob);
2160 vp9_copy(cpi->common.fc.pre_mbsplit_prob, cpi->common.fc.mbsplit_prob); 1922 vp9_copy(cpi->common.fc.pre_mbsplit_prob, cpi->common.fc.mbsplit_prob);
2161 vp9_copy(cpi->common.fc.pre_i8x8_mode_prob, cpi->common.fc.i8x8_mode_prob); 1923 vp9_copy(cpi->common.fc.pre_i8x8_mode_prob, cpi->common.fc.i8x8_mode_prob);
2162 cpi->common.fc.pre_nmvc = cpi->common.fc.nmvc; 1924 cpi->common.fc.pre_nmvc = cpi->common.fc.nmvc;
2163 #if CONFIG_COMP_INTERINTRA_PRED 1925 #if CONFIG_COMP_INTERINTRA_PRED
2164 cpi->common.fc.pre_interintra_prob = cpi->common.fc.interintra_prob; 1926 cpi->common.fc.pre_interintra_prob = cpi->common.fc.interintra_prob;
2165 #endif 1927 #endif
(...skipping 22 matching lines...) Expand all
2188 vp9_write_literal(&header_bc, pc->kf_ymode_probs_index, 3); 1950 vp9_write_literal(&header_bc, pc->kf_ymode_probs_index, 3);
2189 } 1951 }
2190 } else { 1952 } else {
2191 // Update the probabilities used to encode reference frame data 1953 // Update the probabilities used to encode reference frame data
2192 update_ref_probs(cpi); 1954 update_ref_probs(cpi);
2193 1955
2194 #ifdef ENTROPY_STATS 1956 #ifdef ENTROPY_STATS
2195 active_section = 1; 1957 active_section = 1;
2196 #endif 1958 #endif
2197 1959
2198 #if CONFIG_PRED_FILTER
2199 // Write the prediction filter mode used for this frame
2200 vp9_write_literal(&header_bc, pc->pred_filter_mode, 2);
2201
2202 // Write prediction filter on/off probability if signaling at MB level
2203 if (pc->pred_filter_mode == 2)
2204 vp9_write_literal(&header_bc, pc->prob_pred_filter_off, 8);
2205
2206 #endif
2207 if (pc->mcomp_filter_type == SWITCHABLE) 1960 if (pc->mcomp_filter_type == SWITCHABLE)
2208 update_switchable_interp_probs(cpi, &header_bc); 1961 update_switchable_interp_probs(cpi, &header_bc);
2209 1962
2210 #if CONFIG_COMP_INTERINTRA_PRED 1963 #if CONFIG_COMP_INTERINTRA_PRED
2211 if (pc->use_interintra) { 1964 if (pc->use_interintra) {
2212 vp9_cond_prob_update(&header_bc, 1965 vp9_cond_prob_update(&header_bc,
2213 &pc->fc.interintra_prob, 1966 &pc->fc.interintra_prob,
2214 VP9_UPD_INTERINTRA_PROB, 1967 VP9_UPD_INTERINTRA_PROB,
2215 cpi->interintra_count); 1968 cpi->interintra_count);
2216 } 1969 }
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
2256 dest[0] = v; 2009 dest[0] = v;
2257 dest[1] = v >> 8; 2010 dest[1] = v >> 8;
2258 dest[2] = v >> 16; 2011 dest[2] = v >> 16;
2259 } 2012 }
2260 2013
2261 *size = VP9_HEADER_SIZE + extra_bytes_packed + header_bc.pos; 2014 *size = VP9_HEADER_SIZE + extra_bytes_packed + header_bc.pos;
2262 vp9_start_encode(&residual_bc, cx_data + header_bc.pos); 2015 vp9_start_encode(&residual_bc, cx_data + header_bc.pos);
2263 2016
2264 if (pc->frame_type == KEY_FRAME) { 2017 if (pc->frame_type == KEY_FRAME) {
2265 decide_kf_ymode_entropy(cpi); 2018 decide_kf_ymode_entropy(cpi);
2266 write_kfmodes(cpi, &residual_bc); 2019 write_modes(cpi, &residual_bc);
2267 } else { 2020 } else {
2268 /* This is not required if the counts in cpi are consistent with the 2021 /* This is not required if the counts in cpi are consistent with the
2269 * final packing pass */ 2022 * final packing pass */
2270 // if (!cpi->dummy_packing) vp9_zero(cpi->NMVcount); 2023 // if (!cpi->dummy_packing) vp9_zero(cpi->NMVcount);
2271 pack_inter_mode_mvs(cpi, &residual_bc); 2024 write_modes(cpi, &residual_bc);
2272 2025
2273 vp9_update_mode_context(&cpi->common); 2026 vp9_update_mode_context(&cpi->common);
2274 } 2027 }
2275 2028
2276 vp9_stop_encode(&residual_bc); 2029 vp9_stop_encode(&residual_bc);
2277 2030
2278 *size += residual_bc.pos; 2031 *size += residual_bc.pos;
2279 } 2032 }
2280 2033
2281 #ifdef ENTROPY_STATS 2034 #ifdef ENTROPY_STATS
2282 void print_tree_update_probs() { 2035 static void print_tree_update_for_type(FILE *f,
2036 vp9_coeff_stats *tree_update_hist,
2037 int block_types, const char *header) {
2283 int i, j, k, l; 2038 int i, j, k, l;
2284 FILE *f = fopen("coefupdprob.h", "w");
2285 int Sum;
2286 fprintf(f, "\n/* Update probabilities for token entropy tree. */\n\n");
2287 2039
2288 fprintf(f, "const vp9_prob\n" 2040 fprintf(f, "const vp9_coeff_prob %s = {\n", header);
2289 "vp9_coef_update_probs[BLOCK_TYPES]\n" 2041 for (i = 0; i < block_types; i++) {
2290 " [COEF_BANDS]\n"
2291 " [PREV_COEF_CONTEXTS]\n"
2292 " [ENTROPY_NODES] = {\n");
2293 for (i = 0; i < BLOCK_TYPES; i++) {
2294 fprintf(f, " { \n"); 2042 fprintf(f, " { \n");
2295 for (j = 0; j < COEF_BANDS; j++) { 2043 for (j = 0; j < COEF_BANDS; j++) {
2296 fprintf(f, " {\n"); 2044 fprintf(f, " {\n");
2297 for (k = 0; k < PREV_COEF_CONTEXTS; k++) { 2045 for (k = 0; k < PREV_COEF_CONTEXTS; k++) {
2298 fprintf(f, " {"); 2046 fprintf(f, " {");
2299 for (l = 0; l < ENTROPY_NODES; l++) { 2047 for (l = 0; l < ENTROPY_NODES; l++) {
2300 fprintf(f, "%3ld, ", 2048 fprintf(f, "%3d, ",
2301 get_binary_prob(tree_update_hist[i][j][k][l][0], 2049 get_binary_prob(tree_update_hist[i][j][k][l][0],
2302 tree_update_hist[i][j][k][l][1])); 2050 tree_update_hist[i][j][k][l][1]));
2303 } 2051 }
2304 fprintf(f, "},\n"); 2052 fprintf(f, "},\n");
2305 } 2053 }
2306 fprintf(f, " },\n"); 2054 fprintf(f, " },\n");
2307 } 2055 }
2308 fprintf(f, " },\n"); 2056 fprintf(f, " },\n");
2309 } 2057 }
2310 fprintf(f, "};\n"); 2058 fprintf(f, "};\n");
2059 }
2311 2060
2312 fprintf(f, "const vp9_prob\n" 2061 void print_tree_update_probs() {
2313 "vp9_coef_update_probs_8x8[BLOCK_TYPES_8X8]\n" 2062 FILE *f = fopen("coefupdprob.h", "w");
2314 " [COEF_BANDS]\n" 2063 fprintf(f, "\n/* Update probabilities for token entropy tree. */\n\n");
2315 " [PREV_COEF_CONTEXTS]\n"
2316 " [ENTROPY_NODES] = {\n");
2317 for (i = 0; i < BLOCK_TYPES_8X8; i++) {
2318 fprintf(f, " { \n");
2319 for (j = 0; j < COEF_BANDS; j++) {
2320 fprintf(f, " {\n");
2321 for (k = 0; k < PREV_COEF_CONTEXTS; k++) {
2322 fprintf(f, " {");
2323 for (l = 0; l < MAX_ENTROPY_TOKENS - 1; l++) {
2324 fprintf(f, "%3ld, ",
2325 get_binary_prob(tree_update_hist_8x8[i][j][k][l][0],
2326 tree_update_hist_8x8[i][j][k][l][1]));
2327 }
2328 fprintf(f, "},\n");
2329 }
2330 fprintf(f, " },\n");
2331 }
2332 fprintf(f, " },\n");
2333 }
2334 2064
2335 fprintf(f, "const vp9_prob\n" 2065 print_tree_update_for_type(f, tree_update_hist_4x4, BLOCK_TYPES_4X4,
2336 "vp9_coef_update_probs_16x16[BLOCK_TYPES_16X16]\n" 2066 "vp9_coef_update_probs_4x4[BLOCK_TYPES_4X4]");
2337 " [COEF_BANDS]\n" 2067 print_tree_update_for_type(f, hybrid_tree_update_hist_4x4, BLOCK_TYPES_4X4,
2338 " [PREV_COEF_CONTEXTS]\n" 2068 "vp9_coef_update_probs_4x4[BLOCK_TYPES_4X4]");
2339 " [ENTROPY_NODES] = {\n"); 2069 print_tree_update_for_type(f, tree_update_hist_8x8, BLOCK_TYPES_8X8,
2340 for (i = 0; i < BLOCK_TYPES_16X16; i++) { 2070 "vp9_coef_update_probs_8x8[BLOCK_TYPES_8X8]");
2341 fprintf(f, " { \n"); 2071 print_tree_update_for_type(f, hybrid_tree_update_hist_8x8, BLOCK_TYPES_8X8,
2342 for (j = 0; j < COEF_BANDS; j++) { 2072 "vp9_coef_update_probs_8x8[BLOCK_TYPES_8X8]");
2343 fprintf(f, " {\n"); 2073 print_tree_update_for_type(f, tree_update_hist_16x16, BLOCK_TYPES_16X16,
2344 for (k = 0; k < PREV_COEF_CONTEXTS; k++) { 2074 "vp9_coef_update_probs_16x16[BLOCK_TYPES_16X16]");
2345 fprintf(f, " {"); 2075 print_tree_update_for_type(f, hybrid_tree_update_hist_16x16,
2346 for (l = 0; l < MAX_ENTROPY_TOKENS - 1; l++) { 2076 BLOCK_TYPES_16X16,
2347 fprintf(f, "%3ld, ", 2077 "vp9_coef_update_probs_16x16[BLOCK_TYPES_16X16]");
2348 get_binary_prob(tree_update_hist_16x16[i][j][k][l][0], 2078 print_tree_update_for_type(f, tree_update_hist_32x32, BLOCK_TYPES_32X32,
2349 tree_update_hist_16x16[i][j][k][l][1])); 2079 "vp9_coef_update_probs_32x32[BLOCK_TYPES_32X32]");
2350 }
2351 fprintf(f, "},\n");
2352 }
2353 fprintf(f, " },\n");
2354 }
2355 fprintf(f, " },\n");
2356 }
2357 2080
2358 fclose(f); 2081 fclose(f);
2359 f = fopen("treeupdate.bin", "wb"); 2082 f = fopen("treeupdate.bin", "wb");
2360 fwrite(tree_update_hist, sizeof(tree_update_hist), 1, f); 2083 fwrite(tree_update_hist_4x4, sizeof(tree_update_hist_4x4), 1, f);
2361 fwrite(tree_update_hist_8x8, sizeof(tree_update_hist_8x8), 1, f); 2084 fwrite(tree_update_hist_8x8, sizeof(tree_update_hist_8x8), 1, f);
2362 fwrite(tree_update_hist_16x16, sizeof(tree_update_hist_16x16), 1, f); 2085 fwrite(tree_update_hist_16x16, sizeof(tree_update_hist_16x16), 1, f);
2363 fclose(f); 2086 fclose(f);
2364 } 2087 }
2365 #endif 2088 #endif
OLDNEW
« no previous file with comments | « source/libvpx/vp9/encoder/vp9_bitstream.h ('k') | source/libvpx/vp9/encoder/vp9_block.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698