OLD | NEW |
(Empty) | |
| 1 /* |
| 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved. |
| 3 * |
| 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ |
| 10 |
| 11 |
| 12 #include "vp9/decoder/vp9_onyxd_int.h" |
| 13 #include "vp9/common/vp9_header.h" |
| 14 #include "vp9/common/vp9_reconintra.h" |
| 15 #include "vp9/common/vp9_reconintra4x4.h" |
| 16 #include "vp9/common/vp9_reconinter.h" |
| 17 #include "vp9/decoder/vp9_decodframe.h" |
| 18 #include "vp9/decoder/vp9_detokenize.h" |
| 19 #include "vp9/common/vp9_invtrans.h" |
| 20 #include "vp9/common/vp9_alloccommon.h" |
| 21 #include "vp9/common/vp9_entropymode.h" |
| 22 #include "vp9/common/vp9_quant_common.h" |
| 23 #include "vpx_scale/vpxscale.h" |
| 24 #include "vp9/common/vp9_setupintrarecon.h" |
| 25 |
| 26 #include "vp9/decoder/vp9_decodemv.h" |
| 27 #include "vp9/common/vp9_extend.h" |
| 28 #include "vp9/common/vp9_modecont.h" |
| 29 #include "vpx_mem/vpx_mem.h" |
| 30 #include "vp9/decoder/vp9_dboolhuff.h" |
| 31 |
| 32 #include "vp9/common/vp9_seg_common.h" |
| 33 #include "vp9/common/vp9_entropy.h" |
| 34 #include "vp9_rtcd.h" |
| 35 |
| 36 #include <assert.h> |
| 37 #include <stdio.h> |
| 38 |
| 39 #define COEFCOUNT_TESTING |
| 40 |
| 41 // #define DEC_DEBUG |
| 42 #ifdef DEC_DEBUG |
| 43 int dec_debug = 0; |
| 44 #endif |
| 45 |
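|    /* Inverse of the encoder's probability-delta remapping: merge_index() expands |
|     * the modulus-compacted delta index, and inv_remap_prob() re-centers it around |
|     * the previous probability via vp9_inv_recenter_nonneg(); read_prob_diff_update() |
|     * below decodes the subexponential-coded delta and applies this mapping. |
|     */ |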
| 46 static int merge_index(int v, int n, int modulus) { |
| 47 int max1 = (n - 1 - modulus / 2) / modulus + 1; |
| 48 if (v < max1) v = v * modulus + modulus / 2; |
| 49 else { |
| 50 int w; |
| 51 v -= max1; |
| 52 w = v; |
| 53 v += (v + modulus - modulus / 2) / modulus; |
| 54 while (v % modulus == modulus / 2 || |
| 55 w != v - (v + modulus - modulus / 2) / modulus) v++; |
| 56 } |
| 57 return v; |
| 58 } |
| 59 |
| 60 static int inv_remap_prob(int v, int m) { |
| 61 const int n = 256; |
| 62 const int modulus = MODULUS_PARAM; |
| 63 int i; |
| 64 v = merge_index(v, n - 1, modulus); |
| 65 if ((m << 1) <= n) { |
| 66 i = vp9_inv_recenter_nonneg(v + 1, m); |
| 67 } else { |
| 68 i = n - 1 - vp9_inv_recenter_nonneg(v + 1, n - 1 - m); |
| 69 } |
| 70 return i; |
| 71 } |
| 72 |
| 73 static vp9_prob read_prob_diff_update(vp9_reader *const bc, int oldp) { |
| 74 int delp = vp9_decode_term_subexp(bc, SUBEXP_PARAM, 255); |
| 75 return (vp9_prob)inv_remap_prob(delp, oldp); |
| 76 } |
| 77 |
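|    /* Build the per-Q-index dequantization tables shared by all macroblocks: slot 0 |
|     * holds the DC quantizer and the remaining zig-zag positions hold the AC |
|     * quantizer for the Y1, Y2 and UV planes. |
|     */ |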
| 78 void vp9_init_de_quantizer(VP9D_COMP *pbi) { |
| 79 int i; |
| 80 int Q; |
| 81 VP9_COMMON *const pc = &pbi->common; |
| 82 |
| 83 for (Q = 0; Q < QINDEX_RANGE; Q++) { |
| 84 pc->Y1dequant[Q][0] = (short)vp9_dc_quant(Q, pc->y1dc_delta_q); |
| 85 pc->Y2dequant[Q][0] = (short)vp9_dc2quant(Q, pc->y2dc_delta_q); |
| 86 pc->UVdequant[Q][0] = (short)vp9_dc_uv_quant(Q, pc->uvdc_delta_q); |
| 87 |
| 88 /* all the AC values */ |
| 89 for (i = 1; i < 16; i++) { |
| 90 int rc = vp9_default_zig_zag1d[i]; |
| 91 |
| 92 pc->Y1dequant[Q][rc] = (short)vp9_ac_yquant(Q); |
| 93 pc->Y2dequant[Q][rc] = (short)vp9_ac2quant(Q, pc->y2ac_delta_q); |
| 94 pc->UVdequant[Q][rc] = (short)vp9_ac_uv_quant(Q, pc->uvac_delta_q); |
| 95 } |
| 96 } |
| 97 } |
| 98 |
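|    /* Select the Q index for the current macroblock (applying any segment-level |
|     * absolute or delta adjustment), point each block's dequant pointer at the |
|     * shared tables, and choose the inverse-transform functions (the lossless |
|     * Walsh-Hadamard variants when CONFIG_LOSSLESS is on and QIndex is 0). |
|     */ |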
| 99 static void mb_init_dequantizer(VP9D_COMP *pbi, MACROBLOCKD *xd) { |
| 100 int i; |
| 101 int QIndex; |
| 102 VP9_COMMON *const pc = &pbi->common; |
| 103 int segment_id = xd->mode_info_context->mbmi.segment_id; |
| 104 |
| 105 // Set the Q baseline allowing for any segment level adjustment |
| 106 if (vp9_segfeature_active(xd, segment_id, SEG_LVL_ALT_Q)) { |
| 107 /* Abs Value */ |
| 108 if (xd->mb_segment_abs_delta == SEGMENT_ABSDATA) |
| 109 QIndex = vp9_get_segdata(xd, segment_id, SEG_LVL_ALT_Q); |
| 110 |
| 111 /* Delta Value */ |
| 112 else { |
| 113 QIndex = pc->base_qindex + |
| 114 vp9_get_segdata(xd, segment_id, SEG_LVL_ALT_Q); |
| 115       QIndex = (QIndex >= 0) ? ((QIndex <= MAXQ) ? QIndex : MAXQ) : 0;  /* Clamp to valid range */ |
| 116 } |
| 117 } else |
| 118 QIndex = pc->base_qindex; |
| 119 xd->q_index = QIndex; |
| 120 |
| 121 /* Set up the block level dequant pointers */ |
| 122 for (i = 0; i < 16; i++) { |
| 123 xd->block[i].dequant = pc->Y1dequant[QIndex]; |
| 124 } |
| 125 |
| 126 #if CONFIG_LOSSLESS |
| 127 if (!QIndex) { |
| 128 pbi->mb.inv_xform4x4_1_x8 = vp9_short_inv_walsh4x4_1_x8; |
| 129 pbi->mb.inv_xform4x4_x8 = vp9_short_inv_walsh4x4_x8; |
| 130 pbi->mb.inv_walsh4x4_1 = vp9_short_inv_walsh4x4_1_lossless; |
| 131 pbi->mb.inv_walsh4x4_lossless = vp9_short_inv_walsh4x4_lossless; |
| 132 pbi->idct_add = vp9_dequant_idct_add_lossless_c; |
| 133 pbi->dc_idct_add = vp9_dequant_dc_idct_add_lossless_c; |
| 134 pbi->dc_idct_add_y_block = vp9_dequant_dc_idct_add_y_block_lossless_c; |
| 135 pbi->idct_add_y_block = vp9_dequant_idct_add_y_block_lossless_c; |
| 136 pbi->idct_add_uv_block = vp9_dequant_idct_add_uv_block_lossless_c; |
| 137 } else { |
| 138 pbi->mb.inv_xform4x4_1_x8 = vp9_short_idct4x4llm_1; |
| 139 pbi->mb.inv_xform4x4_x8 = vp9_short_idct4x4llm; |
| 140 pbi->mb.inv_walsh4x4_1 = vp9_short_inv_walsh4x4_1; |
| 141 pbi->mb.inv_walsh4x4_lossless = vp9_short_inv_walsh4x4; |
| 142 pbi->idct_add = vp9_dequant_idct_add; |
| 143 pbi->dc_idct_add = vp9_dequant_dc_idct_add; |
| 144 pbi->dc_idct_add_y_block = vp9_dequant_dc_idct_add_y_block; |
| 145 pbi->idct_add_y_block = vp9_dequant_idct_add_y_block; |
| 146 pbi->idct_add_uv_block = vp9_dequant_idct_add_uv_block; |
| 147 } |
| 148 #else |
| 149 pbi->mb.inv_xform4x4_1_x8 = vp9_short_idct4x4llm_1; |
| 150 pbi->mb.inv_xform4x4_x8 = vp9_short_idct4x4llm; |
| 151 pbi->mb.inv_walsh4x4_1 = vp9_short_inv_walsh4x4_1; |
| 152 pbi->mb.inv_walsh4x4_lossless = vp9_short_inv_walsh4x4; |
| 153 pbi->idct_add = vp9_dequant_idct_add; |
| 154 pbi->dc_idct_add = vp9_dequant_dc_idct_add; |
| 155 pbi->dc_idct_add_y_block = vp9_dequant_dc_idct_add_y_block; |
| 156 pbi->idct_add_y_block = vp9_dequant_idct_add_y_block; |
| 157 pbi->idct_add_uv_block = vp9_dequant_idct_add_uv_block; |
| 158 #endif |
| 159 |
| 160 for (i = 16; i < 24; i++) { |
| 161 xd->block[i].dequant = pc->UVdequant[QIndex]; |
| 162 } |
| 163 |
| 164 xd->block[24].dequant = pc->Y2dequant[QIndex]; |
| 165 |
| 166 } |
| 167 |
| 168 /* skip_recon_mb() is modified: instead of writing the result to the predictor buffer and then copying it |
| 169  * to the dst buffer, we write the result directly to the dst buffer. This eliminates an unnecessary copy. |
| 170  */ |
| 171 static void skip_recon_mb(VP9D_COMP *pbi, MACROBLOCKD *xd) { |
| 172 if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME) { |
| 173 #if CONFIG_SUPERBLOCKS |
| 174 if (xd->mode_info_context->mbmi.encoded_as_sb) { |
| 175 vp9_build_intra_predictors_sbuv_s(xd); |
| 176 vp9_build_intra_predictors_sby_s(xd); |
| 177 } else { |
| 178 #endif |
| 179 vp9_build_intra_predictors_mbuv_s(xd); |
| 180 vp9_build_intra_predictors_mby_s(xd); |
| 181 #if CONFIG_SUPERBLOCKS |
| 182 } |
| 183 #endif |
| 184 } else { |
| 185 #if CONFIG_SUPERBLOCKS |
| 186 if (xd->mode_info_context->mbmi.encoded_as_sb) { |
| 187 vp9_build_inter32x32_predictors_sb(xd, |
| 188 xd->dst.y_buffer, |
| 189 xd->dst.u_buffer, |
| 190 xd->dst.v_buffer, |
| 191 xd->dst.y_stride, |
| 192 xd->dst.uv_stride); |
| 193 } else { |
| 194 #endif |
| 195 vp9_build_1st_inter16x16_predictors_mb(xd, |
| 196 xd->dst.y_buffer, |
| 197 xd->dst.u_buffer, |
| 198 xd->dst.v_buffer, |
| 199 xd->dst.y_stride, |
| 200 xd->dst.uv_stride); |
| 201 |
| 202 if (xd->mode_info_context->mbmi.second_ref_frame > 0) { |
| 203 vp9_build_2nd_inter16x16_predictors_mb(xd, |
| 204 xd->dst.y_buffer, |
| 205 xd->dst.u_buffer, |
| 206 xd->dst.v_buffer, |
| 207 xd->dst.y_stride, |
| 208 xd->dst.uv_stride); |
| 209 } |
| 210 #if CONFIG_COMP_INTERINTRA_PRED |
| 211 else if (xd->mode_info_context->mbmi.second_ref_frame == INTRA_FRAME) { |
| 212 vp9_build_interintra_16x16_predictors_mb(xd, |
| 213 xd->dst.y_buffer, |
| 214 xd->dst.u_buffer, |
| 215 xd->dst.v_buffer, |
| 216 xd->dst.y_stride, |
| 217 xd->dst.uv_stride); |
| 218 } |
| 219 #endif |
| 220 #if CONFIG_SUPERBLOCKS |
| 221 } |
| 222 #endif |
| 223 } |
| 224 } |
| 225 |
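|    /* decode_16x16/decode_8x8/decode_4x4 below dequantize and inverse-transform a |
|     * macroblock at the corresponding transform size, using the hybrid-transform |
|     * path when get_tx_type_*() reports something other than DCT_DCT; the 8x8 and |
|     * 4x4 paths also handle the second-order (Y2) block where it is in use. |
|     */ |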
| 226 static void decode_16x16(VP9D_COMP *pbi, MACROBLOCKD *xd, |
| 227 BOOL_DECODER* const bc) { |
| 228 BLOCKD *bd = &xd->block[0]; |
| 229 TX_TYPE tx_type = get_tx_type_16x16(xd, bd); |
| 230 assert(get_2nd_order_usage(xd) == 0); |
| 231 #ifdef DEC_DEBUG |
| 232 if (dec_debug) { |
| 233 int i; |
| 234 printf("\n"); |
| 235 printf("qcoeff 16x16\n"); |
| 236 for (i = 0; i < 400; i++) { |
| 237 printf("%3d ", xd->qcoeff[i]); |
| 238 if (i % 16 == 15) printf("\n"); |
| 239 } |
| 240 printf("\n"); |
| 241 printf("predictor\n"); |
| 242 for (i = 0; i < 400; i++) { |
| 243 printf("%3d ", xd->predictor[i]); |
| 244 if (i % 16 == 15) printf("\n"); |
| 245 } |
| 246 } |
| 247 #endif |
| 248 if (tx_type != DCT_DCT) { |
| 249 vp9_ht_dequant_idct_add_16x16_c(tx_type, xd->qcoeff, |
| 250 xd->block[0].dequant, xd->predictor, |
| 251 xd->dst.y_buffer, 16, xd->dst.y_stride); |
| 252 } else { |
| 253 vp9_dequant_idct_add_16x16(xd->qcoeff, xd->block[0].dequant, |
| 254 xd->predictor, xd->dst.y_buffer, |
| 255 16, xd->dst.y_stride, xd->eobs[0]); |
| 256 } |
| 257 vp9_dequant_idct_add_uv_block_8x8( |
| 258 xd->qcoeff + 16 * 16, xd->block[16].dequant, |
| 259 xd->predictor + 16 * 16, xd->dst.u_buffer, xd->dst.v_buffer, |
| 260 xd->dst.uv_stride, xd->eobs + 16, xd); |
| 261 } |
| 262 |
| 263 static void decode_8x8(VP9D_COMP *pbi, MACROBLOCKD *xd, |
| 264 BOOL_DECODER* const bc) { |
| 265 // First do Y |
| 266 // if the first one is DCT_DCT assume all the rest are as well |
| 267 TX_TYPE tx_type = get_tx_type_8x8(xd, &xd->block[0]); |
| 268 #ifdef DEC_DEBUG |
| 269 if (dec_debug) { |
| 270 int i; |
| 271 printf("\n"); |
| 272 printf("qcoeff 8x8\n"); |
| 273 for (i = 0; i < 400; i++) { |
| 274 printf("%3d ", xd->qcoeff[i]); |
| 275 if (i % 16 == 15) printf("\n"); |
| 276 } |
| 277 } |
| 278 #endif |
| 279 if (tx_type != DCT_DCT || xd->mode_info_context->mbmi.mode == I8X8_PRED) { |
| 280 int i; |
| 281 assert(get_2nd_order_usage(xd) == 0); |
| 282 for (i = 0; i < 4; i++) { |
| 283 int ib = vp9_i8x8_block[i]; |
| 284 int idx = (ib & 0x02) ? (ib + 2) : ib; |
| 285 short *q = xd->block[idx].qcoeff; |
| 286 short *dq = xd->block[0].dequant; |
| 287 unsigned char *pre = xd->block[ib].predictor; |
| 288 unsigned char *dst = *(xd->block[ib].base_dst) + xd->block[ib].dst; |
| 289 int stride = xd->dst.y_stride; |
| 290 BLOCKD *b = &xd->block[ib]; |
| 291 if (xd->mode_info_context->mbmi.mode == I8X8_PRED) { |
| 292 int i8x8mode = b->bmi.as_mode.first; |
| 293 vp9_intra8x8_predict(b, i8x8mode, b->predictor); |
| 294 } |
| 295 tx_type = get_tx_type_8x8(xd, &xd->block[ib]); |
| 296 if (tx_type != DCT_DCT) { |
| 297 vp9_ht_dequant_idct_add_8x8_c(tx_type, q, dq, pre, dst, 16, stride); |
| 298 } else { |
| 299 vp9_dequant_idct_add_8x8_c(q, dq, pre, dst, 16, stride, |
| 300 0, xd->eobs[idx]); |
| 301 } |
| 302 } |
| 303 } else if (xd->mode_info_context->mbmi.mode == SPLITMV) { |
| 304 assert(get_2nd_order_usage(xd) == 0); |
| 305 vp9_dequant_idct_add_y_block_8x8(xd->qcoeff, |
| 306 xd->block[0].dequant, |
| 307 xd->predictor, |
| 308 xd->dst.y_buffer, |
| 309 xd->dst.y_stride, |
| 310 xd->eobs, xd); |
| 311 } else { |
| 312 BLOCKD *b = &xd->block[24]; |
| 313 assert(get_2nd_order_usage(xd) == 1); |
| 314 vp9_dequantize_b_2x2(b); |
| 315 vp9_short_ihaar2x2(&b->dqcoeff[0], b->diff, 8); |
| 316 ((int *)b->qcoeff)[0] = 0;  // 2nd order block coefficients are set to 0 after the idct |
| 317 ((int *)b->qcoeff)[1] = 0; |
| 318 ((int *)b->qcoeff)[2] = 0; |
| 319 ((int *)b->qcoeff)[3] = 0; |
| 320 ((int *)b->qcoeff)[4] = 0; |
| 321 ((int *)b->qcoeff)[5] = 0; |
| 322 ((int *)b->qcoeff)[6] = 0; |
| 323 ((int *)b->qcoeff)[7] = 0; |
| 324 vp9_dequant_dc_idct_add_y_block_8x8(xd->qcoeff, |
| 325 xd->block[0].dequant, |
| 326 xd->predictor, |
| 327 xd->dst.y_buffer, |
| 328 xd->dst.y_stride, |
| 329 xd->eobs, |
| 330 xd->block[24].diff, |
| 331 xd); |
| 332 } |
| 333 |
| 334 // Now do UV |
| 335 if (xd->mode_info_context->mbmi.mode == I8X8_PRED) { |
| 336 int i; |
| 337 for (i = 0; i < 4; i++) { |
| 338 int ib = vp9_i8x8_block[i]; |
| 339 BLOCKD *b = &xd->block[ib]; |
| 340 int i8x8mode = b->bmi.as_mode.first; |
| 341 b = &xd->block[16 + i]; |
| 342 vp9_intra_uv4x4_predict(&xd->block[16 + i], i8x8mode, b->predictor); |
| 343 pbi->idct_add(b->qcoeff, b->dequant, b->predictor, |
| 344 *(b->base_dst) + b->dst, 8, b->dst_stride); |
| 345 b = &xd->block[20 + i]; |
| 346 vp9_intra_uv4x4_predict(&xd->block[20 + i], i8x8mode, b->predictor); |
| 347 pbi->idct_add(b->qcoeff, b->dequant, b->predictor, |
| 348 *(b->base_dst) + b->dst, 8, b->dst_stride); |
| 349 } |
| 350 } else if (xd->mode_info_context->mbmi.mode == SPLITMV) { |
| 351 pbi->idct_add_uv_block(xd->qcoeff + 16 * 16, xd->block[16].dequant, |
| 352 xd->predictor + 16 * 16, xd->dst.u_buffer, xd->dst.v_buffer, |
| 353 xd->dst.uv_stride, xd->eobs + 16); |
| 354 } else { |
| 355 vp9_dequant_idct_add_uv_block_8x8 |
| 356 (xd->qcoeff + 16 * 16, xd->block[16].dequant, |
| 357 xd->predictor + 16 * 16, xd->dst.u_buffer, xd->dst.v_buffer, |
| 358 xd->dst.uv_stride, xd->eobs + 16, xd); |
| 359 } |
| 360 #ifdef DEC_DEBUG |
| 361 if (dec_debug) { |
| 362 int i; |
| 363 printf("\n"); |
| 364 printf("predictor\n"); |
| 365 for (i = 0; i < 384; i++) { |
| 366 printf("%3d ", xd->predictor[i]); |
| 367 if (i % 16 == 15) printf("\n"); |
| 368 } |
| 369 } |
| 370 #endif |
| 371 } |
| 372 |
| 373 static void decode_4x4(VP9D_COMP *pbi, MACROBLOCKD *xd, |
| 374 BOOL_DECODER* const bc) { |
| 375 TX_TYPE tx_type; |
| 376 int i, eobtotal = 0; |
| 377 MB_PREDICTION_MODE mode = xd->mode_info_context->mbmi.mode; |
| 378 if (mode == I8X8_PRED) { |
| 379 assert(get_2nd_order_usage(xd) == 0); |
| 380 for (i = 0; i < 4; i++) { |
| 381 int ib = vp9_i8x8_block[i]; |
| 382 const int iblock[4] = {0, 1, 4, 5}; |
| 383 int j; |
| 384 int i8x8mode; |
| 385 BLOCKD *b; |
| 386 b = &xd->block[ib]; |
| 387 i8x8mode = b->bmi.as_mode.first; |
| 388 vp9_intra8x8_predict(b, i8x8mode, b->predictor); |
| 389 for (j = 0; j < 4; j++) { |
| 390 b = &xd->block[ib + iblock[j]]; |
| 391 tx_type = get_tx_type_4x4(xd, b); |
| 392 if (tx_type != DCT_DCT) { |
| 393 vp9_ht_dequant_idct_add_c(tx_type, b->qcoeff, |
| 394 b->dequant, b->predictor, |
| 395 *(b->base_dst) + b->dst, 16, |
| 396 b->dst_stride); |
| 397 } else { |
| 398 vp9_dequant_idct_add(b->qcoeff, b->dequant, b->predictor, |
| 399 *(b->base_dst) + b->dst, 16, b->dst_stride); |
| 400 } |
| 401 } |
| 402 b = &xd->block[16 + i]; |
| 403 vp9_intra_uv4x4_predict(b, i8x8mode, b->predictor); |
| 404 pbi->idct_add(b->qcoeff, b->dequant, b->predictor, |
| 405 *(b->base_dst) + b->dst, 8, b->dst_stride); |
| 406 b = &xd->block[20 + i]; |
| 407 vp9_intra_uv4x4_predict(b, i8x8mode, b->predictor); |
| 408 pbi->idct_add(b->qcoeff, b->dequant, b->predictor, |
| 409 *(b->base_dst) + b->dst, 8, b->dst_stride); |
| 410 } |
| 411 } else if (mode == B_PRED) { |
| 412 assert(get_2nd_order_usage(xd) == 0); |
| 413 for (i = 0; i < 16; i++) { |
| 414 int b_mode; |
| 415 #if CONFIG_COMP_INTRA_PRED |
| 416 int b_mode2; |
| 417 #endif |
| 418 BLOCKD *b = &xd->block[i]; |
| 419 b_mode = xd->mode_info_context->bmi[i].as_mode.first; |
| 420 #if CONFIG_NEWBINTRAMODES |
| 421 xd->mode_info_context->bmi[i].as_mode.context = b->bmi.as_mode.context = |
| 422 vp9_find_bpred_context(b); |
| 423 #endif |
| 424 if (!xd->mode_info_context->mbmi.mb_skip_coeff) |
| 425 eobtotal += vp9_decode_coefs_4x4(pbi, xd, bc, PLANE_TYPE_Y_WITH_DC, i); |
| 426 #if CONFIG_COMP_INTRA_PRED |
| 427 b_mode2 = xd->mode_info_context->bmi[i].as_mode.second; |
| 428 |
| 429 if (b_mode2 == (B_PREDICTION_MODE)(B_DC_PRED - 1)) { |
| 430 #endif |
| 431 vp9_intra4x4_predict(b, b_mode, b->predictor); |
| 432 #if CONFIG_COMP_INTRA_PRED |
| 433 } else { |
| 434 vp9_comp_intra4x4_predict(b, b_mode, b_mode2, b->predictor); |
| 435 } |
| 436 #endif |
| 437 tx_type = get_tx_type_4x4(xd, b); |
| 438 if (tx_type != DCT_DCT) { |
| 439 vp9_ht_dequant_idct_add_c(tx_type, b->qcoeff, |
| 440 b->dequant, b->predictor, |
| 441 *(b->base_dst) + b->dst, 16, b->dst_stride); |
| 442 } else { |
| 443 vp9_dequant_idct_add(b->qcoeff, b->dequant, b->predictor, |
| 444 *(b->base_dst) + b->dst, 16, b->dst_stride); |
| 445 } |
| 446 xd->above_context->y2 = 1; |
| 447 xd->left_context->y2 = 1; |
| 448 } |
| 449 if (!xd->mode_info_context->mbmi.mb_skip_coeff) { |
| 450 vp9_decode_mb_tokens_4x4_uv(pbi, xd, bc); |
| 451 } |
| 452 vp9_build_intra_predictors_mbuv(xd); |
| 453 pbi->idct_add_uv_block(xd->qcoeff + 16 * 16, |
| 454 xd->block[16].dequant, |
| 455 xd->predictor + 16 * 16, |
| 456 xd->dst.u_buffer, |
| 457 xd->dst.v_buffer, |
| 458 xd->dst.uv_stride, |
| 459 xd->eobs + 16); |
| 460 } else if (mode == SPLITMV) { |
| 461 assert(get_2nd_order_usage(xd) == 0); |
| 462 pbi->idct_add_y_block(xd->qcoeff, |
| 463 xd->block[0].dequant, |
| 464 xd->predictor, |
| 465 xd->dst.y_buffer, |
| 466 xd->dst.y_stride, |
| 467 xd->eobs); |
| 468 pbi->idct_add_uv_block(xd->qcoeff + 16 * 16, |
| 469 xd->block[16].dequant, |
| 470 xd->predictor + 16 * 16, |
| 471 xd->dst.u_buffer, |
| 472 xd->dst.v_buffer, |
| 473 xd->dst.uv_stride, |
| 474 xd->eobs + 16); |
| 475 } else { |
| 476 #ifdef DEC_DEBUG |
| 477 if (dec_debug) { |
| 478 int i; |
| 479 printf("\n"); |
| 480 printf("qcoeff 4x4\n"); |
| 481 for (i = 0; i < 400; i++) { |
| 482 printf("%3d ", xd->qcoeff[i]); |
| 483 if (i % 16 == 15) printf("\n"); |
| 484 } |
| 485 printf("\n"); |
| 486 printf("predictor\n"); |
| 487 for (i = 0; i < 400; i++) { |
| 488 printf("%3d ", xd->predictor[i]); |
| 489 if (i % 16 == 15) printf("\n"); |
| 490 } |
| 491 } |
| 492 #endif |
| 493 tx_type = get_tx_type_4x4(xd, &xd->block[0]); |
| 494 if (tx_type != DCT_DCT) { |
| 495 assert(get_2nd_order_usage(xd) == 0); |
| 496 for (i = 0; i < 16; i++) { |
| 497 BLOCKD *b = &xd->block[i]; |
| 498 tx_type = get_tx_type_4x4(xd, b); |
| 499 if (tx_type != DCT_DCT) { |
| 500 vp9_ht_dequant_idct_add_c(tx_type, b->qcoeff, |
| 501 b->dequant, b->predictor, |
| 502 *(b->base_dst) + b->dst, 16, |
| 503 b->dst_stride); |
| 504 } else { |
| 505 vp9_dequant_idct_add(b->qcoeff, b->dequant, b->predictor, |
| 506 *(b->base_dst) + b->dst, 16, b->dst_stride); |
| 507 } |
| 508 } |
| 509 } else { |
| 510 BLOCKD *b = &xd->block[24]; |
| 511 assert(get_2nd_order_usage(xd) == 1); |
| 512 vp9_dequantize_b(b); |
| 513 if (xd->eobs[24] > 1) { |
| 514 vp9_short_inv_walsh4x4(&b->dqcoeff[0], b->diff); |
| 515 ((int *)b->qcoeff)[0] = 0; |
| 516 ((int *)b->qcoeff)[1] = 0; |
| 517 ((int *)b->qcoeff)[2] = 0; |
| 518 ((int *)b->qcoeff)[3] = 0; |
| 519 ((int *)b->qcoeff)[4] = 0; |
| 520 ((int *)b->qcoeff)[5] = 0; |
| 521 ((int *)b->qcoeff)[6] = 0; |
| 522 ((int *)b->qcoeff)[7] = 0; |
| 523 } else { |
| 524 xd->inv_walsh4x4_1(&b->dqcoeff[0], b->diff); |
| 525 ((int *)b->qcoeff)[0] = 0; |
| 526 } |
| 527 vp9_dequantize_b(b); |
| 528 pbi->dc_idct_add_y_block(xd->qcoeff, |
| 529 xd->block[0].dequant, |
| 530 xd->predictor, |
| 531 xd->dst.y_buffer, |
| 532 xd->dst.y_stride, |
| 533 xd->eobs, |
| 534 xd->block[24].diff); |
| 535 } |
| 536 pbi->idct_add_uv_block(xd->qcoeff + 16 * 16, |
| 537 xd->block[16].dequant, |
| 538 xd->predictor + 16 * 16, |
| 539 xd->dst.u_buffer, |
| 540 xd->dst.v_buffer, |
| 541 xd->dst.uv_stride, |
| 542 xd->eobs + 16); |
| 543 } |
| 544 } |
| 545 |
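|    /* The *_sb variants below (CONFIG_SUPERBLOCKS only) reconstruct one of the four |
|     * macroblocks of a 32x32 superblock, indexed by n; they operate in place on the |
|     * dst buffers, since the prediction for the whole superblock has already been |
|     * written there. |
|     */ |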
| 546 #if CONFIG_SUPERBLOCKS |
| 547 static void decode_16x16_sb(VP9D_COMP *pbi, MACROBLOCKD *xd, |
| 548 BOOL_DECODER* const bc, int n) { |
| 549 int x_idx = n & 1, y_idx = n >> 1; |
| 550 TX_TYPE tx_type = get_tx_type_16x16(xd, &xd->block[0]); |
| 551 if (tx_type != DCT_DCT) { |
| 552 vp9_ht_dequant_idct_add_16x16_c( |
| 553 tx_type, xd->qcoeff, xd->block[0].dequant, |
| 554 xd->dst.y_buffer + y_idx * 16 * xd->dst.y_stride + x_idx * 16, |
| 555 xd->dst.y_buffer + y_idx * 16 * xd->dst.y_stride + x_idx * 16, |
| 556 xd->dst.y_stride, xd->dst.y_stride); |
| 557 } else { |
| 558 vp9_dequant_idct_add_16x16( |
| 559 xd->qcoeff, xd->block[0].dequant, |
| 560 xd->dst.y_buffer + y_idx * 16 * xd->dst.y_stride + x_idx * 16, |
| 561 xd->dst.y_buffer + y_idx * 16 * xd->dst.y_stride + x_idx * 16, |
| 562 xd->dst.y_stride, xd->dst.y_stride, xd->eobs[0]); |
| 563 } |
| 564 vp9_dequant_idct_add_uv_block_8x8_inplace_c( |
| 565 xd->qcoeff + 16 * 16, |
| 566 xd->block[16].dequant, |
| 567 xd->dst.u_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8, |
| 568 xd->dst.v_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8, |
| 569 xd->dst.uv_stride, xd->eobs + 16, xd); |
| 570 } |
| 571 |
| 572 static void decode_8x8_sb(VP9D_COMP *pbi, MACROBLOCKD *xd, |
| 573 BOOL_DECODER* const bc, int n) { |
| 574 BLOCKD *b = &xd->block[24]; |
| 575 int x_idx = n & 1, y_idx = n >> 1; |
| 576 TX_TYPE tx_type = get_tx_type_8x8(xd, &xd->block[0]); |
| 577 if (tx_type != DCT_DCT) { |
| 578 int i; |
| 579 for (i = 0; i < 4; i++) { |
| 580 int ib = vp9_i8x8_block[i]; |
| 581 int idx = (ib & 0x02) ? (ib + 2) : ib; |
| 582 short *q = xd->block[idx].qcoeff; |
| 583 short *dq = xd->block[0].dequant; |
| 584 int stride = xd->dst.y_stride; |
| 585 BLOCKD *b = &xd->block[ib]; |
| 586 tx_type = get_tx_type_8x8(xd, &xd->block[ib]); |
| 587 if (tx_type != DCT_DCT) { |
| 588 vp9_ht_dequant_idct_add_8x8_c( |
| 589 tx_type, q, dq, |
| 590 xd->dst.y_buffer + (y_idx * 16 + (i / 2) * 8) * xd->dst.y_stride |
| 591 + x_idx * 16 + (i & 1) * 8, |
| 592 xd->dst.y_buffer + (y_idx * 16 + (i / 2) * 8) * xd->dst.y_stride |
| 593 + x_idx * 16 + (i & 1) * 8, |
| 594 stride, stride); |
| 595 } else { |
| 596 vp9_dequant_idct_add_8x8_c( |
| 597 q, dq, |
| 598 xd->dst.y_buffer + (y_idx * 16 + (i / 2) * 8) * xd->dst.y_stride |
| 599 + x_idx * 16 + (i & 1) * 8, |
| 600 xd->dst.y_buffer + (y_idx * 16 + (i / 2) * 8) * xd->dst.y_stride |
| 601 + x_idx * 16 + (i & 1) * 8, |
| 602 stride, stride, 0, b->eob); |
| 603 } |
| 604 vp9_dequant_idct_add_uv_block_8x8_inplace_c( |
| 605 xd->qcoeff + 16 * 16, xd->block[16].dequant, |
| 606 xd->dst.u_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8, |
| 607 xd->dst.v_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8, |
| 608 xd->dst.uv_stride, xd->eobs + 16, xd); |
| 609 } |
| 610 } else { |
| 611 vp9_dequantize_b_2x2(b); |
| 612 vp9_short_ihaar2x2(&b->dqcoeff[0], b->diff, 8); |
| 613 ((int *)b->qcoeff)[0] = 0;  // 2nd order block coefficients are set to 0 after the idct |
| 614 ((int *)b->qcoeff)[1] = 0; |
| 615 ((int *)b->qcoeff)[2] = 0; |
| 616 ((int *)b->qcoeff)[3] = 0; |
| 617 ((int *)b->qcoeff)[4] = 0; |
| 618 ((int *)b->qcoeff)[5] = 0; |
| 619 ((int *)b->qcoeff)[6] = 0; |
| 620 ((int *)b->qcoeff)[7] = 0; |
| 621 vp9_dequant_dc_idct_add_y_block_8x8_inplace_c( |
| 622 xd->qcoeff, xd->block[0].dequant, |
| 623 xd->dst.y_buffer + y_idx * 16 * xd->dst.y_stride + x_idx * 16, |
| 624 xd->dst.y_stride, xd->eobs, xd->block[24].diff, xd); |
| 625 vp9_dequant_idct_add_uv_block_8x8_inplace_c( |
| 626 xd->qcoeff + 16 * 16, xd->block[16].dequant, |
| 627 xd->dst.u_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8, |
| 628 xd->dst.v_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8, |
| 629 xd->dst.uv_stride, xd->eobs + 16, xd); |
| 630 } |
| 631 } |
| 632 |
| 633 static void decode_4x4_sb(VP9D_COMP *pbi, MACROBLOCKD *xd, |
| 634 BOOL_DECODER* const bc, int n) { |
| 635 BLOCKD *b = &xd->block[24]; |
| 636 int x_idx = n & 1, y_idx = n >> 1; |
| 637 TX_TYPE tx_type = get_tx_type_4x4(xd, &xd->block[0]); |
| 638 if (tx_type != DCT_DCT) { |
| 639 int i; |
| 640 for (i = 0; i < 16; i++) { |
| 641 BLOCKD *b = &xd->block[i]; |
| 642 tx_type = get_tx_type_4x4(xd, b); |
| 643 if (tx_type != DCT_DCT) { |
| 644 vp9_ht_dequant_idct_add_c( |
| 645 tx_type, b->qcoeff, b->dequant, |
| 646 xd->dst.y_buffer + (y_idx * 16 + (i / 4) * 4) * xd->dst.y_stride |
| 647 + x_idx * 16 + (i & 3) * 4, |
| 648 xd->dst.y_buffer + (y_idx * 16 + (i / 4) * 4) * xd->dst.y_stride |
| 649 + x_idx * 16 + (i & 3) * 4, |
| 650 xd->dst.y_stride, xd->dst.y_stride); |
| 651 } else { |
| 652 vp9_dequant_idct_add_c( |
| 653 b->qcoeff, b->dequant, |
| 654 xd->dst.y_buffer + (y_idx * 16 + (i / 4) * 4) * xd->dst.y_stride |
| 655 + x_idx * 16 + (i & 3) * 4, |
| 656 xd->dst.y_buffer + (y_idx * 16 + (i / 4) * 4) * xd->dst.y_stride |
| 657 + x_idx * 16 + (i & 3) * 4, |
| 658 xd->dst.y_stride, xd->dst.y_stride); |
| 659 } |
| 660 } |
| 661 } else { |
| 662 vp9_dequantize_b(b); |
| 663 if (xd->eobs[24] > 1) { |
| 664 vp9_short_inv_walsh4x4(&b->dqcoeff[0], b->diff); |
| 665 ((int *)b->qcoeff)[0] = 0; |
| 666 ((int *)b->qcoeff)[1] = 0; |
| 667 ((int *)b->qcoeff)[2] = 0; |
| 668 ((int *)b->qcoeff)[3] = 0; |
| 669 ((int *)b->qcoeff)[4] = 0; |
| 670 ((int *)b->qcoeff)[5] = 0; |
| 671 ((int *)b->qcoeff)[6] = 0; |
| 672 ((int *)b->qcoeff)[7] = 0; |
| 673 } else { |
| 674 xd->inv_walsh4x4_1(&b->dqcoeff[0], b->diff); |
| 675 ((int *)b->qcoeff)[0] = 0; |
| 676 } |
| 677 vp9_dequant_dc_idct_add_y_block_4x4_inplace_c( |
| 678 xd->qcoeff, xd->block[0].dequant, |
| 679 xd->dst.y_buffer + y_idx * 16 * xd->dst.y_stride + x_idx * 16, |
| 680 xd->dst.y_stride, xd->eobs, xd->block[24].diff, xd); |
| 681 } |
| 682 vp9_dequant_idct_add_uv_block_4x4_inplace_c( |
| 683 xd->qcoeff + 16 * 16, xd->block[16].dequant, |
| 684 xd->dst.u_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8, |
| 685 xd->dst.v_buffer + y_idx * 8 * xd->dst.uv_stride + x_idx * 8, |
| 686 xd->dst.uv_stride, xd->eobs + 16, xd); |
| 687 } |
| 688 |
| 689 static void decode_superblock(VP9D_COMP *pbi, MACROBLOCKD *xd, |
| 690 int mb_row, unsigned int mb_col, |
| 691 BOOL_DECODER* const bc) { |
| 692 int i, n, eobtotal; |
| 693 TX_SIZE tx_size = xd->mode_info_context->mbmi.txfm_size; |
| 694 VP9_COMMON *const pc = &pbi->common; |
| 695 MODE_INFO *orig_mi = xd->mode_info_context; |
| 696 |
| 697 assert(xd->mode_info_context->mbmi.encoded_as_sb); |
| 698 |
| 699 if (pbi->common.frame_type != KEY_FRAME) |
| 700 vp9_setup_interp_filters(xd, xd->mode_info_context->mbmi.interp_filter, pc); |
| 701 |
| 702 // re-initialize macroblock dequantizer before detokenization |
| 703 if (xd->segmentation_enabled) |
| 704 mb_init_dequantizer(pbi, xd); |
| 705 |
| 706 if (xd->mode_info_context->mbmi.mb_skip_coeff) { |
| 707 vp9_reset_mb_tokens_context(xd); |
| 708 if (mb_col < pc->mb_cols - 1) |
| 709 xd->above_context++; |
| 710 if (mb_row < pc->mb_rows - 1) |
| 711 xd->left_context++; |
| 712 vp9_reset_mb_tokens_context(xd); |
| 713 if (mb_col < pc->mb_cols - 1) |
| 714 xd->above_context--; |
| 715 if (mb_row < pc->mb_rows - 1) |
| 716 xd->left_context--; |
| 717 |
| 718 /* Special case: Force the loopfilter to skip when eobtotal and |
| 719 * mb_skip_coeff are zero. |
| 720 */ |
| 721 skip_recon_mb(pbi, xd); |
| 722 return; |
| 723 } |
| 724 |
| 725 /* do prediction */ |
| 726 if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME) { |
| 727 vp9_build_intra_predictors_sby_s(xd); |
| 728 vp9_build_intra_predictors_sbuv_s(xd); |
| 729 } else { |
| 730 vp9_build_inter32x32_predictors_sb(xd, xd->dst.y_buffer, |
| 731 xd->dst.u_buffer, xd->dst.v_buffer, |
| 732 xd->dst.y_stride, xd->dst.uv_stride); |
| 733 } |
| 734 |
| 735 /* dequantization and idct */ |
| 736 for (n = 0; n < 4; n++) { |
| 737 int x_idx = n & 1, y_idx = n >> 1; |
| 738 |
| 739 if (mb_col + x_idx >= pc->mb_cols || mb_row + y_idx >= pc->mb_rows) |
| 740 continue; |
| 741 |
| 742 |
| 743 xd->above_context = pc->above_context + mb_col + x_idx; |
| 744 xd->left_context = pc->left_context + y_idx; |
| 745 xd->mode_info_context = orig_mi + x_idx + y_idx * pc->mode_info_stride; |
| 746 for (i = 0; i < 25; i++) { |
| 747 xd->block[i].eob = 0; |
| 748 xd->eobs[i] = 0; |
| 749 } |
| 750 |
| 751 eobtotal = vp9_decode_mb_tokens(pbi, xd, bc); |
| 752 if (eobtotal == 0) { // skip loopfilter |
| 753 xd->mode_info_context->mbmi.mb_skip_coeff = 1; |
| 754 continue; |
| 755 } |
| 756 |
| 757 if (tx_size == TX_16X16) { |
| 758 decode_16x16_sb(pbi, xd, bc, n); |
| 759 } else if (tx_size == TX_8X8) { |
| 760 decode_8x8_sb(pbi, xd, bc, n); |
| 761 } else { |
| 762 decode_4x4_sb(pbi, xd, bc, n); |
| 763 } |
| 764 } |
| 765 |
| 766 xd->above_context = pc->above_context + mb_col; |
| 767 xd->left_context = pc->left_context; |
| 768 xd->mode_info_context = orig_mi; |
| 769 } |
| 770 #endif |
| 771 |
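|    /* Decode a single (non-superblock) macroblock: re-init the dequantizer if |
|     * segmentation is enabled, detokenize the coefficients unless the MB is |
|     * skipped, build the intra or inter prediction, and dispatch to the |
|     * per-transform-size reconstruction routines above. |
|     */ |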
| 772 static void decode_macroblock(VP9D_COMP *pbi, MACROBLOCKD *xd, |
| 773 int mb_row, unsigned int mb_col, |
| 774 BOOL_DECODER* const bc) { |
| 775 int eobtotal = 0; |
| 776 MB_PREDICTION_MODE mode; |
| 777 int i; |
| 778 int tx_size; |
| 779 |
| 780 #if CONFIG_SUPERBLOCKS |
| 781 assert(!xd->mode_info_context->mbmi.encoded_as_sb); |
| 782 #endif |
| 783 |
| 784 // re-initialize macroblock dequantizer before detokenization |
| 785 if (xd->segmentation_enabled) |
| 786 mb_init_dequantizer(pbi, xd); |
| 787 |
| 788 tx_size = xd->mode_info_context->mbmi.txfm_size; |
| 789 mode = xd->mode_info_context->mbmi.mode; |
| 790 |
| 791 if (xd->mode_info_context->mbmi.mb_skip_coeff) { |
| 792 vp9_reset_mb_tokens_context(xd); |
| 793 } else if (!bool_error(bc)) { |
| 794 for (i = 0; i < 25; i++) { |
| 795 xd->block[i].eob = 0; |
| 796 xd->eobs[i] = 0; |
| 797 } |
| 798 if (mode != B_PRED) { |
| 799 eobtotal = vp9_decode_mb_tokens(pbi, xd, bc); |
| 800 } |
| 801 } |
| 802 |
| 803 //mode = xd->mode_info_context->mbmi.mode; |
| 804 if (pbi->common.frame_type != KEY_FRAME) |
| 805 vp9_setup_interp_filters(xd, xd->mode_info_context->mbmi.interp_filter, |
| 806 &pbi->common); |
| 807 |
| 808 if (eobtotal == 0 && mode != B_PRED && mode != SPLITMV |
| 809 && mode != I8X8_PRED |
| 810 && !bool_error(bc)) { |
| 811 /* Special case: Force the loopfilter to skip when eobtotal and |
| 812 * mb_skip_coeff are zero. |
| 813  */ |
| 814 xd->mode_info_context->mbmi.mb_skip_coeff = 1; |
| 815 skip_recon_mb(pbi, xd); |
| 816 return; |
| 817 } |
| 818 #ifdef DEC_DEBUG |
| 819 if (dec_debug) |
| 820 printf("Decoding mb: %d %d\n", xd->mode_info_context->mbmi.mode, tx_size); |
| 821 #endif |
| 822 |
| 823 // moved to be performed before detokenization |
| 824 // if (xd->segmentation_enabled) |
| 825 // mb_init_dequantizer(pbi, xd); |
| 826 |
| 827 /* do prediction */ |
| 828 if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME) { |
| 829 if (mode != I8X8_PRED) { |
| 830 vp9_build_intra_predictors_mbuv(xd); |
| 831 if (mode != B_PRED) { |
| 832 vp9_build_intra_predictors_mby(xd); |
| 833 } |
| 834 } |
| 835 } else { |
| 836 #ifdef DEC_DEBUG |
| 837 if (dec_debug) |
| 838 printf("Decoding mb: %d %d interp %d\n", |
| 839 xd->mode_info_context->mbmi.mode, tx_size, |
| 840 xd->mode_info_context->mbmi.interp_filter); |
| 841 #endif |
| 842 vp9_build_inter_predictors_mb(xd); |
| 843 } |
| 844 |
| 845 if (tx_size == TX_16X16) { |
| 846 decode_16x16(pbi, xd, bc); |
| 847 } else if (tx_size == TX_8X8) { |
| 848 decode_8x8(pbi, xd, bc); |
| 849 } else { |
| 850 decode_4x4(pbi, xd, bc); |
| 851 } |
| 852 #ifdef DEC_DEBUG |
| 853 if (dec_debug) { |
| 854 int i, j; |
| 855 printf("\n"); |
| 856 printf("final y\n"); |
| 857 for (i = 0; i < 16; i++) { |
| 858 for (j = 0; j < 16; j++) |
| 859 printf("%3d ", xd->dst.y_buffer[i * xd->dst.y_stride + j]); |
| 860 printf("\n"); |
| 861 } |
| 862 printf("\n"); |
| 863 printf("final u\n"); |
| 864 for (i = 0; i < 8; i++) { |
| 865 for (j = 0; j < 8; j++) |
| 866 printf("%3d ", xd->dst.u_buffer[i * xd->dst.uv_stride + j]); |
| 867 printf("\n"); |
| 868 } |
| 869 printf("\n"); |
| 870 printf("final v\n"); |
| 871 for (i = 0; i < 8; i++) { |
| 872 for (j = 0; j < 8; j++) |
| 873 printf("%3d ", xd->dst.v_buffer[i * xd->dst.uv_stride + j]); |
| 874 printf("\n"); |
| 875 } |
| 876 fflush(stdout); |
| 877 } |
| 878 #endif |
| 879 } |
| 880 |
| 881 |
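|    /* Read an optional delta-Q value: a flag bit, then a 4-bit magnitude and a sign |
|     * bit when present. *q_update is set if the value differs from the previous |
|     * delta so that the dequant tables get rebuilt. |
|     */ |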
| 882 static int get_delta_q(vp9_reader *bc, int prev, int *q_update) { |
| 883 int ret_val = 0; |
| 884 |
| 885 if (vp9_read_bit(bc)) { |
| 886 ret_val = vp9_read_literal(bc, 4); |
| 887 |
| 888 if (vp9_read_bit(bc)) |
| 889 ret_val = -ret_val; |
| 890 } |
| 891 |
| 892 /* Trigger a quantizer update if the delta-q value has changed */ |
| 893 if (ret_val != prev) |
| 894 *q_update = 1; |
| 895 |
| 896 return ret_val; |
| 897 } |
| 898 |
| 899 #ifdef PACKET_TESTING |
| 900 #include <stdio.h> |
| 901 FILE *vpxlog = 0; |
| 902 #endif |
| 903 |
| 904 /* Decode a row of Superblocks (2x2 region of MBs) */ |
| 905 static void |
| 906 decode_sb_row(VP9D_COMP *pbi, VP9_COMMON *pc, int mbrow, MACROBLOCKD *xd, |
| 907 BOOL_DECODER* const bc) { |
| 908 int i; |
| 909 int sb_col; |
| 910 int mb_row, mb_col; |
| 911 int recon_yoffset, recon_uvoffset; |
| 912 int ref_fb_idx = pc->lst_fb_idx; |
| 913 int dst_fb_idx = pc->new_fb_idx; |
| 914 int recon_y_stride = pc->yv12_fb[ref_fb_idx].y_stride; |
| 915 int recon_uv_stride = pc->yv12_fb[ref_fb_idx].uv_stride; |
| 916 int row_delta[4] = { 0, +1, 0, -1}; |
| 917 int col_delta[4] = { +1, -1, +1, +1}; |
| 918 int sb_cols = (pc->mb_cols + 1) >> 1; |
| 919 |
| 920 // For a SB there are 2 left contexts, each pertaining to a MB row within the SB |
| 921 vpx_memset(pc->left_context, 0, sizeof(pc->left_context)); |
| 922 |
| 923 mb_row = mbrow; |
| 924 mb_col = 0; |
| 925 |
| 926 for (sb_col = 0; sb_col < sb_cols; sb_col++) { |
| 927 MODE_INFO *mi = xd->mode_info_context; |
| 928 |
| 929 #if CONFIG_SUPERBLOCKS |
| 930 mi->mbmi.encoded_as_sb = vp9_read(bc, pc->sb_coded); |
| 931 #endif |
| 932 |
| 933 // Process the 4 MBs within the SB in the order: |
| 934 // top-left, top-right, bottom-left, bottom-right |
| 935 for (i = 0; i < 4; i++) { |
| 936 int dy = row_delta[i]; |
| 937 int dx = col_delta[i]; |
| 938 int offset_extended = dy * xd->mode_info_stride + dx; |
| 939 |
| 940 xd->mb_index = i; |
| 941 |
| 942 mi = xd->mode_info_context; |
| 943 if ((mb_row >= pc->mb_rows) || (mb_col >= pc->mb_cols)) { |
| 944 // MB lies outside frame, skip on to next |
| 945 mb_row += dy; |
| 946 mb_col += dx; |
| 947 xd->mode_info_context += offset_extended; |
| 948 xd->prev_mode_info_context += offset_extended; |
| 949 continue; |
| 950 } |
| 951 #if CONFIG_SUPERBLOCKS |
| 952 if (i) |
| 953 mi->mbmi.encoded_as_sb = 0; |
| 954 #endif |
| 955 |
| 956 // Set above context pointer |
| 957 xd->above_context = pc->above_context + mb_col; |
| 958 xd->left_context = pc->left_context + (i >> 1); |
| 959 |
| 960 /* Distance of Mb to the various image edges. |
| 961 * These are specified to 8th pel as they are always compared to |
| 962 * values that are in 1/8th pel units |
| 963 */ |
| 964 xd->mb_to_top_edge = -((mb_row * 16) << 3); |
| 965 xd->mb_to_left_edge = -((mb_col * 16) << 3); |
| 966 #if CONFIG_SUPERBLOCKS |
| 967 if (mi->mbmi.encoded_as_sb) { |
| 968 xd->mb_to_bottom_edge = ((pc->mb_rows - 2 - mb_row) * 16) << 3; |
| 969 xd->mb_to_right_edge = ((pc->mb_cols - 2 - mb_col) * 16) << 3; |
| 970 } else { |
| 971 #endif |
| 972 xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3; |
| 973 xd->mb_to_right_edge = ((pc->mb_cols - 1 - mb_col) * 16) << 3; |
| 974 #if CONFIG_SUPERBLOCKS |
| 975 } |
| 976 #endif |
| 977 #ifdef DEC_DEBUG |
| 978 dec_debug = (pbi->common.current_video_frame == 46 && |
| 979 mb_row == 5 && mb_col == 2); |
| 980 if (dec_debug) |
| 981 #if CONFIG_SUPERBLOCKS |
| 982 printf("Enter Debug %d %d sb %d\n", mb_row, mb_col, |
| 983 mi->mbmi.encoded_as_sb); |
| 984 #else |
| 985 printf("Enter Debug %d %d\n", mb_row, mb_col); |
| 986 #endif |
| 987 #endif |
| 988 xd->up_available = (mb_row != 0); |
| 989 xd->left_available = (mb_col != 0); |
| 990 |
| 991 |
| 992 recon_yoffset = (mb_row * recon_y_stride * 16) + (mb_col * 16); |
| 993 recon_uvoffset = (mb_row * recon_uv_stride * 8) + (mb_col * 8); |
| 994 |
| 995 xd->dst.y_buffer = pc->yv12_fb[dst_fb_idx].y_buffer + recon_yoffset; |
| 996 xd->dst.u_buffer = pc->yv12_fb[dst_fb_idx].u_buffer + recon_uvoffset; |
| 997 xd->dst.v_buffer = pc->yv12_fb[dst_fb_idx].v_buffer + recon_uvoffset; |
| 998 |
| 999 vp9_decode_mb_mode_mv(pbi, xd, mb_row, mb_col, bc); |
| 1000 |
| 1001 update_blockd_bmi(xd); |
| 1002 #ifdef DEC_DEBUG |
| 1003 if (dec_debug) |
| 1004 printf("Hello\n"); |
| 1005 #endif |
| 1006 |
| 1007 /* Select the appropriate reference frame for this MB */ |
| 1008 if (xd->mode_info_context->mbmi.ref_frame == LAST_FRAME) |
| 1009 ref_fb_idx = pc->lst_fb_idx; |
| 1010 else if (xd->mode_info_context->mbmi.ref_frame == GOLDEN_FRAME) |
| 1011 ref_fb_idx = pc->gld_fb_idx; |
| 1012 else |
| 1013 ref_fb_idx = pc->alt_fb_idx; |
| 1014 |
| 1015 xd->pre.y_buffer = pc->yv12_fb[ref_fb_idx].y_buffer + recon_yoffset; |
| 1016 xd->pre.u_buffer = pc->yv12_fb[ref_fb_idx].u_buffer + recon_uvoffset; |
| 1017 xd->pre.v_buffer = pc->yv12_fb[ref_fb_idx].v_buffer + recon_uvoffset; |
| 1018 |
| 1019 if (xd->mode_info_context->mbmi.second_ref_frame > 0) { |
| 1020 int second_ref_fb_idx; |
| 1021 |
| 1022 /* Select the appropriate reference frame for this MB */ |
| 1023 if (xd->mode_info_context->mbmi.second_ref_frame == LAST_FRAME) |
| 1024 second_ref_fb_idx = pc->lst_fb_idx; |
| 1025 else if (xd->mode_info_context->mbmi.second_ref_frame == |
| 1026 GOLDEN_FRAME) |
| 1027 second_ref_fb_idx = pc->gld_fb_idx; |
| 1028 else |
| 1029 second_ref_fb_idx = pc->alt_fb_idx; |
| 1030 |
| 1031 xd->second_pre.y_buffer = |
| 1032 pc->yv12_fb[second_ref_fb_idx].y_buffer + recon_yoffset; |
| 1033 xd->second_pre.u_buffer = |
| 1034 pc->yv12_fb[second_ref_fb_idx].u_buffer + recon_uvoffset; |
| 1035 xd->second_pre.v_buffer = |
| 1036 pc->yv12_fb[second_ref_fb_idx].v_buffer + recon_uvoffset; |
| 1037 } |
| 1038 |
| 1039 if (xd->mode_info_context->mbmi.ref_frame != INTRA_FRAME) { |
| 1040 /* propagate errors from reference frames */ |
| 1041 xd->corrupted |= pc->yv12_fb[ref_fb_idx].corrupted; |
| 1042 } |
| 1043 |
| 1044 #if CONFIG_SUPERBLOCKS |
| 1045 if (xd->mode_info_context->mbmi.encoded_as_sb) { |
| 1046 if (mb_col < pc->mb_cols - 1) |
| 1047 mi[1] = mi[0]; |
| 1048 if (mb_row < pc->mb_rows - 1) { |
| 1049 mi[pc->mode_info_stride] = mi[0]; |
| 1050 if (mb_col < pc->mb_cols - 1) |
| 1051 mi[pc->mode_info_stride + 1] = mi[0]; |
| 1052 } |
| 1053 } |
| 1054 if (xd->mode_info_context->mbmi.encoded_as_sb) { |
| 1055 decode_superblock(pbi, xd, mb_row, mb_col, bc); |
| 1056 } else { |
| 1057 #endif |
| 1058 vp9_intra_prediction_down_copy(xd); |
| 1059 decode_macroblock(pbi, xd, mb_row, mb_col, bc); |
| 1060 #if CONFIG_SUPERBLOCKS |
| 1061 } |
| 1062 #endif |
| 1063 |
| 1064 /* check if the boolean decoder has suffered an error */ |
| 1065 xd->corrupted |= bool_error(bc); |
| 1066 |
| 1067 #if CONFIG_SUPERBLOCKS |
| 1068 if (mi->mbmi.encoded_as_sb) { |
| 1069 assert(!i); |
| 1070 mb_col += 2; |
| 1071 xd->mode_info_context += 2; |
| 1072 xd->prev_mode_info_context += 2; |
| 1073 break; |
| 1074 } |
| 1075 #endif |
| 1076 |
| 1077 // skip to next MB |
| 1078 xd->mode_info_context += offset_extended; |
| 1079 xd->prev_mode_info_context += offset_extended; |
| 1080 mb_row += dy; |
| 1081 mb_col += dx; |
| 1082 } |
| 1083 } |
| 1084 |
| 1085 /* skip prediction column */ |
| 1086 xd->mode_info_context += 1 - (pc->mb_cols & 0x1) + xd->mode_info_stride; |
| 1087 xd->prev_mode_info_context += 1 - (pc->mb_cols & 0x1) + xd->mode_info_stride; |
| 1088 } |
| 1089 |
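|    /* read_partition_size() decodes a 3-byte little-endian partition length; |
|     * read_is_valid() checks that [start, start + len) is non-empty and lies |
|     * within the source buffer before the bool decoder is attached to it. |
|     */ |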
| 1090 static unsigned int read_partition_size(const unsigned char *cx_size) { |
| 1091 const unsigned int size = |
| 1092 cx_size[0] + (cx_size[1] << 8) + (cx_size[2] << 16); |
| 1093 return size; |
| 1094 } |
| 1095 |
| 1096 static int read_is_valid(const unsigned char *start, |
| 1097 size_t len, |
| 1098 const unsigned char *end) { |
| 1099 return (start + len > start && start + len <= end); |
| 1100 } |
| 1101 |
| 1102 |
| 1103 static void setup_token_decoder(VP9D_COMP *pbi, |
| 1104 const unsigned char *cx_data, |
| 1105 BOOL_DECODER* const bool_decoder) { |
| 1106 VP9_COMMON *pc = &pbi->common; |
| 1107 const unsigned char *user_data_end = pbi->Source + pbi->source_sz; |
| 1108 const unsigned char *partition; |
| 1109 |
| 1110 ptrdiff_t partition_size; |
| 1111 ptrdiff_t bytes_left; |
| 1112 |
| 1113 // Set up pointers to token partition |
| 1114 partition = cx_data; |
| 1115 bytes_left = user_data_end - partition; |
| 1116 partition_size = bytes_left; |
| 1117 |
| 1118 /* Validate the calculated partition length. If the buffer |
| 1119 * described by the partition can't be fully read, then restrict |
| 1120 * it to the portion that can be (for EC mode) or throw an error. |
| 1121 */ |
| 1122 if (!read_is_valid(partition, partition_size, user_data_end)) { |
| 1123 vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME, |
| 1124 "Truncated packet or corrupt partition " |
| 1125 "%d length", 1); |
| 1126 } |
| 1127 |
| 1128 if (vp9_start_decode(bool_decoder, |
| 1129 partition, (unsigned int)partition_size)) |
| 1130 vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR, |
| 1131 "Failed to allocate bool decoder %d", 1); |
| 1132 } |
| 1133 |
| 1134 static void init_frame(VP9D_COMP *pbi) { |
| 1135 VP9_COMMON *const pc = &pbi->common; |
| 1136 MACROBLOCKD *const xd = &pbi->mb; |
| 1137 |
| 1138 if (pc->frame_type == KEY_FRAME) { |
| 1139 /* Various keyframe initializations */ |
| 1140 vp9_init_mv_probs(pc); |
| 1141 |
| 1142 vp9_init_mbmode_probs(pc); |
| 1143 vp9_default_bmode_probs(pc->fc.bmode_prob); |
| 1144 |
| 1145 vp9_default_coef_probs(pc); |
| 1146 vp9_kf_default_bmode_probs(pc->kf_bmode_prob); |
| 1147 |
| 1148 // Reset the segment feature data to the default stats: |
| 1149 // Features disabled, 0, with delta coding (Default state). |
| 1150 vp9_clearall_segfeatures(xd); |
| 1151 |
| 1152 xd->mb_segment_abs_delta = SEGMENT_DELTADATA; |
| 1153 |
| 1154 /* reset the mode ref deltas for loop filter */ |
| 1155 vpx_memset(xd->ref_lf_deltas, 0, sizeof(xd->ref_lf_deltas)); |
| 1156 vpx_memset(xd->mode_lf_deltas, 0, sizeof(xd->mode_lf_deltas)); |
| 1157 |
| 1158 /* All buffers are implicitly updated on key frames. */ |
| 1159 pc->refresh_golden_frame = 1; |
| 1160 pc->refresh_alt_ref_frame = 1; |
| 1161 pc->copy_buffer_to_gf = 0; |
| 1162 pc->copy_buffer_to_arf = 0; |
| 1163 |
| 1164 /* Note that Golden and Altref modes cannot be used on a key frame so |
| 1165 * ref_frame_sign_bias[] is undefined and meaningless |
| 1166 */ |
| 1167 pc->ref_frame_sign_bias[GOLDEN_FRAME] = 0; |
| 1168 pc->ref_frame_sign_bias[ALTREF_FRAME] = 0; |
| 1169 |
| 1170 vp9_init_mode_contexts(&pbi->common); |
| 1171 vpx_memcpy(&pc->lfc, &pc->fc, sizeof(pc->fc)); |
| 1172 vpx_memcpy(&pc->lfc_a, &pc->fc, sizeof(pc->fc)); |
| 1173 |
| 1174 vpx_memset(pc->prev_mip, 0, |
| 1175 (pc->mb_cols + 1) * (pc->mb_rows + 1)* sizeof(MODE_INFO)); |
| 1176 vpx_memset(pc->mip, 0, |
| 1177 (pc->mb_cols + 1) * (pc->mb_rows + 1)* sizeof(MODE_INFO)); |
| 1178 |
| 1179 vp9_update_mode_info_border(pc, pc->mip); |
| 1180 vp9_update_mode_info_in_image(pc, pc->mi); |
| 1181 |
| 1182 } else { |
| 1183 |
| 1184 if (!pc->use_bilinear_mc_filter) |
| 1185 pc->mcomp_filter_type = EIGHTTAP; |
| 1186 else |
| 1187 pc->mcomp_filter_type = BILINEAR; |
| 1188 |
| 1189 /* To enable choice of different interpolation filters */ |
| 1190 vp9_setup_interp_filters(xd, pc->mcomp_filter_type, pc); |
| 1191 } |
| 1192 |
| 1193 xd->mode_info_context = pc->mi; |
| 1194 xd->prev_mode_info_context = pc->prev_mi; |
| 1195 xd->frame_type = pc->frame_type; |
| 1196 xd->mode_info_context->mbmi.mode = DC_PRED; |
| 1197 xd->mode_info_stride = pc->mode_info_stride; |
| 1198 xd->corrupted = 0; /* init without corruption */ |
| 1199 |
| 1200 xd->fullpixel_mask = 0xffffffff; |
| 1201 if (pc->full_pixel) |
| 1202 xd->fullpixel_mask = 0xfffffff8; |
| 1203 |
| 1204 } |
| 1205 |
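|    /* Conditionally update the coefficient probabilities: a single flag gates the |
|     * whole table, and each entropy node is then updated with probability |
|     * COEF_UPDATE_PROB using the differentially coded value from |
|     * read_prob_diff_update(); read_coef_probs() applies this per transform size. |
|     */ |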
| 1206 static void read_coef_probs_common( |
| 1207 BOOL_DECODER* const bc, |
| 1208 vp9_prob coef_probs[BLOCK_TYPES][COEF_BANDS] |
| 1209 [PREV_COEF_CONTEXTS][ENTROPY_NODES]) { |
| 1210 int i, j, k, l; |
| 1211 |
| 1212 if (vp9_read_bit(bc)) { |
| 1213 for (i = 0; i < BLOCK_TYPES; i++) { |
| 1214 for (j = !i; j < COEF_BANDS; j++) { |
| 1215 /* NB: This j loop starts from 1 on block type i == 0 */ |
| 1216 for (k = 0; k < PREV_COEF_CONTEXTS; k++) { |
| 1217 if (k >= 3 && ((i == 0 && j == 1) || |
| 1218 (i > 0 && j == 0))) |
| 1219 continue; |
| 1220 for (l = 0; l < ENTROPY_NODES; l++) { |
| 1221 vp9_prob *const p = coef_probs[i][j][k] + l; |
| 1222 |
| 1223 if (vp9_read(bc, COEF_UPDATE_PROB)) { |
| 1224 *p = read_prob_diff_update(bc, *p); |
| 1225 } |
| 1226 } |
| 1227 } |
| 1228 } |
| 1229 } |
| 1230 } |
| 1231 } |
| 1232 |
| 1233 static void read_coef_probs(VP9D_COMP *pbi, BOOL_DECODER* const bc) { |
| 1234 VP9_COMMON *const pc = &pbi->common; |
| 1235 |
| 1236 read_coef_probs_common(bc, pc->fc.coef_probs); |
| 1237 read_coef_probs_common(bc, pc->fc.hybrid_coef_probs); |
| 1238 |
| 1239 if (pbi->common.txfm_mode != ONLY_4X4) { |
| 1240 read_coef_probs_common(bc, pc->fc.coef_probs_8x8); |
| 1241 read_coef_probs_common(bc, pc->fc.hybrid_coef_probs_8x8); |
| 1242 } |
| 1243 if (pbi->common.txfm_mode > ALLOW_8X8) { |
| 1244 read_coef_probs_common(bc, pc->fc.coef_probs_16x16); |
| 1245 read_coef_probs_common(bc, pc->fc.hybrid_coef_probs_16x16); |
| 1246 } |
| 1247 } |
| 1248 |
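|    /* Top-level frame decode: parse the 3-byte uncompressed header (frame type, |
|     * version, show flag, first-partition size) plus the 7-byte key-frame sync/size |
|     * fields, then the compressed header (segmentation, loop filter, quantizers, |
|     * reference and probability updates) with header_bc, and finally decode the |
|     * residual data one superblock row at a time with residual_bc. |
|     */ |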
| 1249 int vp9_decode_frame(VP9D_COMP *pbi, const unsigned char **p_data_end) { |
| 1250 BOOL_DECODER header_bc, residual_bc; |
| 1251 VP9_COMMON *const pc = &pbi->common; |
| 1252 MACROBLOCKD *const xd = &pbi->mb; |
| 1253 const unsigned char *data = (const unsigned char *)pbi->Source; |
| 1254 const unsigned char *data_end = data + pbi->source_sz; |
| 1255 ptrdiff_t first_partition_length_in_bytes = 0; |
| 1256 |
| 1257 int mb_row; |
| 1258 int i, j; |
| 1259 int corrupt_tokens = 0; |
| 1260 |
| 1261 /* start with no corruption of current frame */ |
| 1262 xd->corrupted = 0; |
| 1263 pc->yv12_fb[pc->new_fb_idx].corrupted = 0; |
| 1264 |
| 1265 if (data_end - data < 3) { |
| 1266 vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME, |
| 1267 "Truncated packet"); |
| 1268 } else { |
| 1269 pc->last_frame_type = pc->frame_type; |
| 1270 pc->frame_type = (FRAME_TYPE)(data[0] & 1); |
| 1271 pc->version = (data[0] >> 1) & 7; |
| 1272 pc->show_frame = (data[0] >> 4) & 1; |
| 1273 first_partition_length_in_bytes = |
| 1274 (data[0] | (data[1] << 8) | (data[2] << 16)) >> 5; |
| 1275 |
| 1276 if ((data + first_partition_length_in_bytes > data_end |
| 1277 || data + first_partition_length_in_bytes < data)) |
| 1278 vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME, |
| 1279 "Truncated packet or corrupt partition 0 length"); |
| 1280 |
| 1281 data += 3; |
| 1282 |
| 1283 vp9_setup_version(pc); |
| 1284 |
| 1285 if (pc->frame_type == KEY_FRAME) { |
| 1286 const int Width = pc->Width; |
| 1287 const int Height = pc->Height; |
| 1288 |
| 1289 /* vet via sync code */ |
| 1290 /* When error concealment is enabled we should only check the sync |
| 1291 * code if we have enough bits available |
| 1292 */ |
| 1293 if (data + 3 < data_end) { |
| 1294 if (data[0] != 0x9d || data[1] != 0x01 || data[2] != 0x2a) |
| 1295 vpx_internal_error(&pc->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1296 "Invalid frame sync code"); |
| 1297 } |
| 1298 |
| 1299 /* If error concealment is enabled we should only parse the new size |
| 1300 * if we have enough data. Otherwise we will end up with the wrong |
| 1301 * size. |
| 1302 */ |
| 1303 if (data + 6 < data_end) { |
| 1304 pc->Width = (data[3] | (data[4] << 8)) & 0x3fff; |
| 1305 pc->horiz_scale = data[4] >> 6; |
| 1306 pc->Height = (data[5] | (data[6] << 8)) & 0x3fff; |
| 1307 pc->vert_scale = data[6] >> 6; |
| 1308 } |
| 1309 data += 7; |
| 1310 |
| 1311 if (Width != pc->Width || Height != pc->Height) { |
| 1312 if (pc->Width <= 0) { |
| 1313 pc->Width = Width; |
| 1314 vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME, |
| 1315 "Invalid frame width"); |
| 1316 } |
| 1317 |
| 1318 if (pc->Height <= 0) { |
| 1319 pc->Height = Height; |
| 1320 vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME, |
| 1321 "Invalid frame height"); |
| 1322 } |
| 1323 |
| 1324 if (vp9_alloc_frame_buffers(pc, pc->Width, pc->Height)) |
| 1325 vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR, |
| 1326 "Failed to allocate frame buffers"); |
| 1327 } |
| 1328 } |
| 1329 } |
| 1330 #ifdef DEC_DEBUG |
| 1331 printf("Decode frame %d\n", pc->current_video_frame); |
| 1332 #endif |
| 1333 |
| 1334 if ((!pbi->decoded_key_frame && pc->frame_type != KEY_FRAME) || |
| 1335 pc->Width == 0 || pc->Height == 0) { |
| 1336 return -1; |
| 1337 } |
| 1338 |
| 1339 init_frame(pbi); |
| 1340 |
| 1341 if (vp9_start_decode(&header_bc, data, |
| 1342 (unsigned int)first_partition_length_in_bytes)) |
| 1343 vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR, |
| 1344 "Failed to allocate bool decoder 0"); |
| 1345 if (pc->frame_type == KEY_FRAME) { |
| 1346 pc->clr_type = (YUV_TYPE)vp9_read_bit(&header_bc); |
| 1347 pc->clamp_type = (CLAMP_TYPE)vp9_read_bit(&header_bc); |
| 1348 } |
| 1349 |
| 1350 /* Is segmentation enabled */ |
| 1351 xd->segmentation_enabled = (unsigned char)vp9_read_bit(&header_bc); |
| 1352 |
| 1353 if (xd->segmentation_enabled) { |
| 1354 // Read whether or not the segmentation map is being explicitly |
| 1355 // updated this frame. |
| 1356 xd->update_mb_segmentation_map = (unsigned char)vp9_read_bit(&header_bc); |
| 1357 |
| 1358 // If so what method will be used. |
| 1359 if (xd->update_mb_segmentation_map) { |
| 1360 // Which macro block level features are enabled |
| 1361 |
| 1362 // Read the probs used to decode the segment id for each macro |
| 1363 // block. |
| 1364 for (i = 0; i < MB_FEATURE_TREE_PROBS; i++) { |
| 1365 xd->mb_segment_tree_probs[i] = vp9_read_bit(&header_bc) ? |
| 1366 (vp9_prob)vp9_read_literal(&header_bc, 8) : 255; |
| 1367 } |
| 1368 |
| 1369 // Read the prediction probs needed to decode the segment id |
| 1370 pc->temporal_update = (unsigned char)vp9_read_bit(&header_bc); |
| 1371 for (i = 0; i < PREDICTION_PROBS; i++) { |
| 1372 if (pc->temporal_update) { |
| 1373 pc->segment_pred_probs[i] = vp9_read_bit(&header_bc) ? |
| 1374 (vp9_prob)vp9_read_literal(&header_bc, 8) : 255; |
| 1375 } else { |
| 1376 pc->segment_pred_probs[i] = 255; |
| 1377 } |
| 1378 } |
| 1379 } |
| 1380 // Is the segment data being updated |
| 1381 xd->update_mb_segmentation_data = (unsigned char)vp9_read_bit(&header_bc); |
| 1382 |
| 1383 if (xd->update_mb_segmentation_data) { |
| 1384 int data; |
| 1385 |
| 1386 xd->mb_segment_abs_delta = (unsigned char)vp9_read_bit(&header_bc); |
| 1387 |
| 1388 vp9_clearall_segfeatures(xd); |
| 1389 |
| 1390 // For each segment... |
| 1391 for (i = 0; i < MAX_MB_SEGMENTS; i++) { |
| 1392 // For each of the segments features... |
| 1393 for (j = 0; j < SEG_LVL_MAX; j++) { |
| 1394 // Is the feature enabled |
| 1395 if (vp9_read_bit(&header_bc)) { |
| 1396 // Update the feature data and mask |
| 1397 vp9_enable_segfeature(xd, i, j); |
| 1398 |
| 1399 data = vp9_decode_unsigned_max(&header_bc, |
| 1400 vp9_seg_feature_data_max(j)); |
| 1401 |
| 1402 // Is the segment data signed.. |
| 1403 if (vp9_is_segfeature_signed(j)) { |
| 1404 if (vp9_read_bit(&header_bc)) |
| 1405 data = -data; |
| 1406 } |
| 1407 } else |
| 1408 data = 0; |
| 1409 |
| 1410 vp9_set_segdata(xd, i, j, data); |
| 1411 } |
| 1412 } |
| 1413 } |
| 1414 } |
| 1415 |
| 1416 // Read common prediction model status flag probability updates for the |
| 1417 // reference frame |
| 1418 if (pc->frame_type == KEY_FRAME) { |
| 1419 // Set the prediction probabilities to defaults |
| 1420 pc->ref_pred_probs[0] = 120; |
| 1421 pc->ref_pred_probs[1] = 80; |
| 1422 pc->ref_pred_probs[2] = 40; |
| 1423 } else { |
| 1424 for (i = 0; i < PREDICTION_PROBS; i++) { |
| 1425 if (vp9_read_bit(&header_bc)) |
| 1426 pc->ref_pred_probs[i] = (vp9_prob)vp9_read_literal(&header_bc, 8); |
| 1427 } |
| 1428 } |
| 1429 |
| 1430 #if CONFIG_SUPERBLOCKS |
| 1431 pc->sb_coded = vp9_read_literal(&header_bc, 8); |
| 1432 #endif |
| 1433 |
| 1434 /* Read the transform coding mode */ |
| 1435 pc->txfm_mode = vp9_read_literal(&header_bc, 2); |
| 1436 if (pc->txfm_mode == TX_MODE_SELECT) { |
| 1437 pc->prob_tx[0] = vp9_read_literal(&header_bc, 8); |
| 1438 pc->prob_tx[1] = vp9_read_literal(&header_bc, 8); |
| 1439 } |
| 1440 /* Read the loop filter level and type */ |
| 1441 pc->filter_type = (LOOPFILTERTYPE) vp9_read_bit(&header_bc); |
| 1442 pc->filter_level = vp9_read_literal(&header_bc, 6); |
| 1443 pc->sharpness_level = vp9_read_literal(&header_bc, 3); |
| 1444 |
| 1445 /* Read in loop filter deltas applied at the MB level based on mode or ref frame. */ |
| 1446 xd->mode_ref_lf_delta_update = 0; |
| 1447 xd->mode_ref_lf_delta_enabled = (unsigned char)vp9_read_bit(&header_bc); |
| 1448 |
| 1449 if (xd->mode_ref_lf_delta_enabled) { |
| 1450 /* Do the deltas need to be updated */ |
| 1451 xd->mode_ref_lf_delta_update = (unsigned char)vp9_read_bit(&header_bc); |
| 1452 |
| 1453 if (xd->mode_ref_lf_delta_update) { |
| 1454 /* Send update */ |
| 1455 for (i = 0; i < MAX_REF_LF_DELTAS; i++) { |
| 1456 if (vp9_read_bit(&header_bc)) { |
| 1457 /*sign = vp9_read_bit( &header_bc );*/ |
| 1458 xd->ref_lf_deltas[i] = (signed char)vp9_read_literal(&header_bc, 6); |
| 1459 |
| 1460 if (vp9_read_bit(&header_bc)) /* Apply sign */ |
| 1461 xd->ref_lf_deltas[i] = xd->ref_lf_deltas[i] * -1; |
| 1462 } |
| 1463 } |
| 1464 |
| 1465 /* Send update */ |
| 1466 for (i = 0; i < MAX_MODE_LF_DELTAS; i++) { |
| 1467 if (vp9_read_bit(&header_bc)) { |
| 1468 /*sign = vp9_read_bit( &header_bc );*/ |
| 1469 xd->mode_lf_deltas[i] = (signed char)vp9_read_literal(&header_bc, 6); |
| 1470 |
| 1471 if (vp9_read_bit(&header_bc)) /* Apply sign */ |
| 1472 xd->mode_lf_deltas[i] = xd->mode_lf_deltas[i] * -1; |
| 1473 } |
| 1474 } |
| 1475 } |
| 1476 } |
| 1477 |
| 1478 // Dummy read for now |
| 1479 vp9_read_literal(&header_bc, 2); |
| 1480 |
| 1481 setup_token_decoder(pbi, data + first_partition_length_in_bytes, |
| 1482 &residual_bc); |
| 1483 |
| 1484 /* Read the default quantizers. */ |
| 1485 { |
| 1486 int Q, q_update; |
| 1487 |
| 1488 Q = vp9_read_literal(&header_bc, QINDEX_BITS); |
| 1489 pc->base_qindex = Q; |
| 1490 q_update = 0; |
| 1491 /* The 1st order AC quantizer uses the base Q directly (no delta is coded) */ |
| 1492 pc->y1dc_delta_q = get_delta_q(&header_bc, pc->y1dc_delta_q, &q_update); |
| 1493 pc->y2dc_delta_q = get_delta_q(&header_bc, pc->y2dc_delta_q, &q_update); |
| 1494 pc->y2ac_delta_q = get_delta_q(&header_bc, pc->y2ac_delta_q, &q_update); |
| 1495 pc->uvdc_delta_q = get_delta_q(&header_bc, pc->uvdc_delta_q, &q_update); |
| 1496 pc->uvac_delta_q = get_delta_q(&header_bc, pc->uvac_delta_q, &q_update); |
| 1497 |
| 1498 if (q_update) |
| 1499 vp9_init_de_quantizer(pbi); |
| 1500 |
| 1501 /* MB level dequantizer setup */ |
| 1502 mb_init_dequantizer(pbi, &pbi->mb); |
| 1503 } |
| 1504 |
| 1505 /* Determine if the golden frame or ARF buffer should be updated and how. |
| 1506 * For all non key frames the GF and ARF refresh flags and sign bias |
| 1507 * flags must be set explicitly. |
| 1508 */ |
| 1509 if (pc->frame_type != KEY_FRAME) { |
| 1510 /* Should the GF or ARF be updated from the current frame */ |
| 1511 pc->refresh_golden_frame = vp9_read_bit(&header_bc); |
| 1512 pc->refresh_alt_ref_frame = vp9_read_bit(&header_bc); |
| 1513 |
| 1514 if (pc->refresh_alt_ref_frame) { |
| 1515 vpx_memcpy(&pc->fc, &pc->lfc_a, sizeof(pc->fc)); |
| 1516 } else { |
| 1517 vpx_memcpy(&pc->fc, &pc->lfc, sizeof(pc->fc)); |
| 1518 } |
| 1519 |
| 1520 /* Buffer to buffer copy flags. */ |
| 1521 pc->copy_buffer_to_gf = 0; |
| 1522 |
| 1523 if (!pc->refresh_golden_frame) |
| 1524 pc->copy_buffer_to_gf = vp9_read_literal(&header_bc, 2); |
| 1525 |
| 1526 pc->copy_buffer_to_arf = 0; |
| 1527 |
| 1528 if (!pc->refresh_alt_ref_frame) |
| 1529 pc->copy_buffer_to_arf = vp9_read_literal(&header_bc, 2); |
| 1530 |
| 1531 pc->ref_frame_sign_bias[GOLDEN_FRAME] = vp9_read_bit(&header_bc); |
| 1532 pc->ref_frame_sign_bias[ALTREF_FRAME] = vp9_read_bit(&header_bc); |
| 1533 |
| 1534 /* Is high precision mv allowed */ |
| 1535 xd->allow_high_precision_mv = (unsigned char)vp9_read_bit(&header_bc); |
| 1536 // Read the type of subpel filter to use |
| 1537 if (vp9_read_bit(&header_bc)) { |
| 1538 pc->mcomp_filter_type = SWITCHABLE; |
| 1539 } else { |
| 1540 pc->mcomp_filter_type = vp9_read_literal(&header_bc, 2); |
| 1541 } |
| 1542 #if CONFIG_COMP_INTERINTRA_PRED |
| 1543 pc->use_interintra = vp9_read_bit(&header_bc); |
| 1544 #endif |
| 1545 /* To enable choice of different interpolation filters */ |
| 1546 vp9_setup_interp_filters(xd, pc->mcomp_filter_type, pc); |
| 1547 } |
| 1548 |
| 1549 pc->refresh_entropy_probs = vp9_read_bit(&header_bc); |
| 1550 if (pc->refresh_entropy_probs == 0) { |
| 1551 vpx_memcpy(&pc->lfc, &pc->fc, sizeof(pc->fc)); |
| 1552 } |
| 1553 |
| 1554 pc->refresh_last_frame = (pc->frame_type == KEY_FRAME) |
| 1555 || vp9_read_bit(&header_bc); |
| 1556 |
| 1557 // Read inter mode probability context updates |
| 1558 if (pc->frame_type != KEY_FRAME) { |
| 1559 int i, j; |
| 1560 for (i = 0; i < INTER_MODE_CONTEXTS; i++) { |
| 1561 for (j = 0; j < 4; j++) { |
| 1562 if (vp9_read(&header_bc, 252)) { |
| 1563 pc->fc.vp9_mode_contexts[i][j] = |
| 1564 (vp9_prob)vp9_read_literal(&header_bc, 8); |
| 1565 } |
| 1566 } |
| 1567 } |
| 1568 } |
| 1569 |
| 1570 if (0) { |
| 1571 FILE *z = fopen("decodestats.stt", "a"); |
| 1572 fprintf(z, "%6d F:%d,G:%d,A:%d,L:%d,Q:%d\n", |
| 1573 pc->current_video_frame, |
| 1574 pc->frame_type, |
| 1575 pc->refresh_golden_frame, |
| 1576 pc->refresh_alt_ref_frame, |
| 1577 pc->refresh_last_frame, |
| 1578 pc->base_qindex); |
| 1579 fclose(z); |
| 1580 } |
| 1581 |
| 1582 vp9_copy(pbi->common.fc.pre_coef_probs, |
| 1583 pbi->common.fc.coef_probs); |
| 1584 vp9_copy(pbi->common.fc.pre_hybrid_coef_probs, |
| 1585 pbi->common.fc.hybrid_coef_probs); |
| 1586 vp9_copy(pbi->common.fc.pre_coef_probs_8x8, |
| 1587 pbi->common.fc.coef_probs_8x8); |
| 1588 vp9_copy(pbi->common.fc.pre_hybrid_coef_probs_8x8, |
| 1589 pbi->common.fc.hybrid_coef_probs_8x8); |
| 1590 vp9_copy(pbi->common.fc.pre_coef_probs_16x16, |
| 1591 pbi->common.fc.coef_probs_16x16); |
| 1592 vp9_copy(pbi->common.fc.pre_hybrid_coef_probs_16x16, |
| 1593 pbi->common.fc.hybrid_coef_probs_16x16); |
| 1594 vp9_copy(pbi->common.fc.pre_ymode_prob, pbi->common.fc.ymode_prob); |
| 1595 #if CONFIG_SUPERBLOCKS |
| 1596 vp9_copy(pbi->common.fc.pre_sb_ymode_prob, pbi->common.fc.sb_ymode_prob); |
| 1597 #endif |
| 1598 vp9_copy(pbi->common.fc.pre_uv_mode_prob, pbi->common.fc.uv_mode_prob); |
| 1599 vp9_copy(pbi->common.fc.pre_bmode_prob, pbi->common.fc.bmode_prob); |
| 1600 vp9_copy(pbi->common.fc.pre_i8x8_mode_prob, pbi->common.fc.i8x8_mode_prob); |
| 1601 vp9_copy(pbi->common.fc.pre_sub_mv_ref_prob, pbi->common.fc.sub_mv_ref_prob); |
| 1602 vp9_copy(pbi->common.fc.pre_mbsplit_prob, pbi->common.fc.mbsplit_prob); |
| 1603 #if CONFIG_COMP_INTERINTRA_PRED |
| 1604 pbi->common.fc.pre_interintra_prob = pbi->common.fc.interintra_prob; |
| 1605 #endif |
| 1606 pbi->common.fc.pre_nmvc = pbi->common.fc.nmvc; |
| 1607 vp9_zero(pbi->common.fc.coef_counts); |
| 1608 vp9_zero(pbi->common.fc.hybrid_coef_counts); |
| 1609 vp9_zero(pbi->common.fc.coef_counts_8x8); |
| 1610 vp9_zero(pbi->common.fc.hybrid_coef_counts_8x8); |
| 1611 vp9_zero(pbi->common.fc.coef_counts_16x16); |
| 1612 vp9_zero(pbi->common.fc.hybrid_coef_counts_16x16); |
| 1613 vp9_zero(pbi->common.fc.ymode_counts); |
| 1614 #if CONFIG_SUPERBLOCKS |
| 1615 vp9_zero(pbi->common.fc.sb_ymode_counts); |
| 1616 #endif |
| 1617 vp9_zero(pbi->common.fc.uv_mode_counts); |
| 1618 vp9_zero(pbi->common.fc.bmode_counts); |
| 1619 vp9_zero(pbi->common.fc.i8x8_mode_counts); |
| 1620 vp9_zero(pbi->common.fc.sub_mv_ref_counts); |
| 1621 vp9_zero(pbi->common.fc.mbsplit_counts); |
| 1622 vp9_zero(pbi->common.fc.NMVcount); |
| 1623 vp9_zero(pbi->common.fc.mv_ref_ct); |
| 1624 #if CONFIG_COMP_INTERINTRA_PRED |
| 1625 vp9_zero(pbi->common.fc.interintra_counts); |
| 1626 #endif |
| 1627 |
| 1628 read_coef_probs(pbi, &header_bc); |
| 1629 |
| 1630 vpx_memcpy(&xd->pre, &pc->yv12_fb[pc->lst_fb_idx], sizeof(YV12_BUFFER_CONFIG)); |
| 1631 vpx_memcpy(&xd->dst, &pc->yv12_fb[pc->new_fb_idx], sizeof(YV12_BUFFER_CONFIG)); |
| 1632 |
| 1633 // Create the segmentation map structure and set to 0 |
| 1634 if (!pc->last_frame_seg_map) |
| 1635 CHECK_MEM_ERROR(pc->last_frame_seg_map, |
| 1636 vpx_calloc((pc->mb_rows * pc->mb_cols), 1)); |
| 1637 |
| 1638 /* set up the new frame for intra coded blocks */ |
| 1639 vp9_setup_intra_recon(&pc->yv12_fb[pc->new_fb_idx]); |
| 1640 |
| 1641 vp9_setup_block_dptrs(xd); |
| 1642 |
| 1643 vp9_build_block_doffsets(xd); |
| 1644 |
| 1645 /* clear out the coeff buffer */ |
| 1646 vpx_memset(xd->qcoeff, 0, sizeof(xd->qcoeff)); |
| 1647 |
| 1648 /* Read the mb_no_coeff_skip flag */ |
| 1649 pc->mb_no_coeff_skip = (int)vp9_read_bit(&header_bc); |
| 1650 |
| 1651 vp9_decode_mode_mvs_init(pbi, &header_bc); |
| 1652 |
| 1653 vpx_memset(pc->above_context, 0, sizeof(ENTROPY_CONTEXT_PLANES) * pc->mb_cols); |
| 1654 |
| 1655 // Reset the macroblock mode info context to the start of the list |
| 1656 xd->mode_info_context = pc->mi; |
| 1657 xd->prev_mode_info_context = pc->prev_mi; |
| 1658 |
| 1659 /* Decode a row of superblocks */ |
| 1660 for (mb_row = 0; mb_row < pc->mb_rows; mb_row += 2) { |
| 1661 decode_sb_row(pbi, pc, mb_row, xd, &residual_bc); |
| 1662 } |
| 1663 corrupt_tokens |= xd->corrupted; |
| 1664 |
| 1665 /* Collect information about decoder corruption. */ |
| 1666 /* 1. Check first boolean decoder for errors. */ |
| 1667 pc->yv12_fb[pc->new_fb_idx].corrupted = bool_error(&header_bc); |
| 1668 /* 2. Check the macroblock information */ |
| 1669 pc->yv12_fb[pc->new_fb_idx].corrupted |= corrupt_tokens; |
| 1670 |
| 1671 if (!pbi->decoded_key_frame) { |
| 1672 if (pc->frame_type == KEY_FRAME && |
| 1673 !pc->yv12_fb[pc->new_fb_idx].corrupted) |
| 1674 pbi->decoded_key_frame = 1; |
| 1675 else |
| 1676 vpx_internal_error(&pbi->common.error, VPX_CODEC_CORRUPT_FRAME, |
| 1677 "A stream must start with a complete key frame"); |
| 1678 } |
| 1679 |
| 1680 vp9_adapt_coef_probs(pc); |
| 1681 if (pc->frame_type != KEY_FRAME) { |
| 1682 vp9_adapt_mode_probs(pc); |
| 1683 vp9_adapt_nmv_probs(pc, xd->allow_high_precision_mv); |
| 1684 vp9_update_mode_context(&pbi->common); |
| 1685 } |
| 1686 |
| 1687 /* If this was a kf or Gf note the Q used */ |
| 1688 if ((pc->frame_type == KEY_FRAME) || |
| 1689 pc->refresh_golden_frame || pc->refresh_alt_ref_frame) { |
| 1690 pc->last_kf_gf_q = pc->base_qindex; |
| 1691 } |
| 1692 if (pc->refresh_entropy_probs) { |
| 1693 if (pc->refresh_alt_ref_frame) |
| 1694 vpx_memcpy(&pc->lfc_a, &pc->fc, sizeof(pc->fc)); |
| 1695 else |
| 1696 vpx_memcpy(&pc->lfc, &pc->fc, sizeof(pc->fc)); |
| 1697 } |
| 1698 |
| 1699 #ifdef PACKET_TESTING |
| 1700 { |
| 1701 FILE *f = fopen("decompressor.VP8", "ab"); |
| 1702 unsigned int size = residual_bc.pos + header_bc.pos + 8; |
| 1703 fwrite((void *) &size, 4, 1, f); |
| 1704 fwrite((void *) pbi->Source, size, 1, f); |
| 1705 fclose(f); |
| 1706 } |
| 1707 #endif |
| 1708 // printf("Frame %d Done\n", frame_count++); |
| 1709 |
| 1710 /* Find the end of the coded buffer */ |
| 1711 while (residual_bc.count > CHAR_BIT |
| 1712 && residual_bc.count < VP9_BD_VALUE_SIZE) { |
| 1713 residual_bc.count -= CHAR_BIT; |
| 1714 residual_bc.user_buffer--; |
| 1715 } |
| 1716 *p_data_end = residual_bc.user_buffer; |
| 1717 return 0; |
| 1718 } |