| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved. | 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 177 matching lines...) |
| 188 for (i = 1; i < MAX_MB_PLANE; i++) | 188 for (i = 1; i < MAX_MB_PLANE; i++) |
| 189 xd->plane[i].dequant = cm->uv_dequant[q_index]; | 189 xd->plane[i].dequant = cm->uv_dequant[q_index]; |
| 190 } | 190 } |
| 191 | 191 |
| 192 static void inverse_transform_block(MACROBLOCKD* xd, int plane, int block, | 192 static void inverse_transform_block(MACROBLOCKD* xd, int plane, int block, |
| 193 TX_SIZE tx_size, uint8_t *dst, int stride, | 193 TX_SIZE tx_size, uint8_t *dst, int stride, |
| 194 int eob) { | 194 int eob) { |
| 195 struct macroblockd_plane *const pd = &xd->plane[plane]; | 195 struct macroblockd_plane *const pd = &xd->plane[plane]; |
| 196 if (eob > 0) { | 196 if (eob > 0) { |
| 197 TX_TYPE tx_type = DCT_DCT; | 197 TX_TYPE tx_type = DCT_DCT; |
| 198 int16_t *const dqcoeff = BLOCK_OFFSET(pd->dqcoeff, block); | 198 tran_low_t *const dqcoeff = BLOCK_OFFSET(pd->dqcoeff, block); |
| 199 if (xd->lossless) { | 199 if (xd->lossless) { |
| 200 tx_type = DCT_DCT; | 200 tx_type = DCT_DCT; |
| 201 vp9_iwht4x4_add(dqcoeff, dst, stride, eob); | 201 vp9_iwht4x4_add(dqcoeff, dst, stride, eob); |
| 202 } else { | 202 } else { |
| 203 const PLANE_TYPE plane_type = pd->plane_type; | 203 const PLANE_TYPE plane_type = pd->plane_type; |
| 204 switch (tx_size) { | 204 switch (tx_size) { |
| 205 case TX_4X4: | 205 case TX_4X4: |
| 206 tx_type = get_tx_type_4x4(plane_type, xd, block); | 206 tx_type = get_tx_type_4x4(plane_type, xd, block); |
| 207 vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob); | 207 vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob); |
| 208 break; | 208 break; |
| (...skipping 33 matching lines...) |
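A note on the int16_t to tran_low_t change in inverse_transform_block() above: tran_low_t widens the dequantized-coefficient type when high bit depth is compiled in, so 10/12-bit intermediates do not overflow 16 bits. A minimal sketch of the typedef under the usual CONFIG_VP9_HIGHBITDEPTH arrangement (its exact location is not part of this diff):

    #include <stdint.h>

    /* Sketch (assumption): coefficients widen to 32 bits only when high bit
     * depth is compiled in; otherwise the historical 16-bit type remains. */
    #if CONFIG_VP9_HIGHBITDEPTH
    typedef int32_t tran_low_t;
    #else
    typedef int16_t tran_low_t;
    #endif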
| 242 vp9_reader *r; | 242 vp9_reader *r; |
| 243 }; | 243 }; |
| 244 | 244 |
| 245 static void predict_and_reconstruct_intra_block(int plane, int block, | 245 static void predict_and_reconstruct_intra_block(int plane, int block, |
| 246 BLOCK_SIZE plane_bsize, | 246 BLOCK_SIZE plane_bsize, |
| 247 TX_SIZE tx_size, void *arg) { | 247 TX_SIZE tx_size, void *arg) { |
| 248 struct intra_args *const args = (struct intra_args *)arg; | 248 struct intra_args *const args = (struct intra_args *)arg; |
| 249 VP9_COMMON *const cm = args->cm; | 249 VP9_COMMON *const cm = args->cm; |
| 250 MACROBLOCKD *const xd = args->xd; | 250 MACROBLOCKD *const xd = args->xd; |
| 251 struct macroblockd_plane *const pd = &xd->plane[plane]; | 251 struct macroblockd_plane *const pd = &xd->plane[plane]; |
| 252 MODE_INFO *const mi = xd->mi[0]; | 252 MODE_INFO *const mi = xd->mi[0].src_mi; |
| 253 const PREDICTION_MODE mode = (plane == 0) ? get_y_mode(mi, block) | 253 const PREDICTION_MODE mode = (plane == 0) ? get_y_mode(mi, block) |
| 254 : mi->mbmi.uv_mode; | 254 : mi->mbmi.uv_mode; |
| 255 int x, y; | 255 int x, y; |
| 256 uint8_t *dst; | 256 uint8_t *dst; |
| 257 txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y); | 257 txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y); |
| 258 dst = &pd->dst.buf[4 * y * pd->dst.stride + 4 * x]; | 258 dst = &pd->dst.buf[4 * y * pd->dst.stride + 4 * x]; |
| 259 | 259 |
| 260 vp9_predict_intra_block(xd, block >> (tx_size << 1), | 260 vp9_predict_intra_block(xd, block >> (tx_size << 1), |
| 261 b_width_log2(plane_bsize), tx_size, mode, | 261 b_width_log2(plane_bsize), tx_size, mode, |
| 262 dst, pd->dst.stride, dst, pd->dst.stride, | 262 dst, pd->dst.stride, dst, pd->dst.stride, |
| (...skipping 35 matching lines...) |
| 298 static MB_MODE_INFO *set_offsets(VP9_COMMON *const cm, MACROBLOCKD *const xd, | 298 static MB_MODE_INFO *set_offsets(VP9_COMMON *const cm, MACROBLOCKD *const xd, |
| 299 const TileInfo *const tile, | 299 const TileInfo *const tile, |
| 300 BLOCK_SIZE bsize, int mi_row, int mi_col) { | 300 BLOCK_SIZE bsize, int mi_row, int mi_col) { |
| 301 const int bw = num_8x8_blocks_wide_lookup[bsize]; | 301 const int bw = num_8x8_blocks_wide_lookup[bsize]; |
| 302 const int bh = num_8x8_blocks_high_lookup[bsize]; | 302 const int bh = num_8x8_blocks_high_lookup[bsize]; |
| 303 const int x_mis = MIN(bw, cm->mi_cols - mi_col); | 303 const int x_mis = MIN(bw, cm->mi_cols - mi_col); |
| 304 const int y_mis = MIN(bh, cm->mi_rows - mi_row); | 304 const int y_mis = MIN(bh, cm->mi_rows - mi_row); |
| 305 const int offset = mi_row * cm->mi_stride + mi_col; | 305 const int offset = mi_row * cm->mi_stride + mi_col; |
| 306 int x, y; | 306 int x, y; |
| 307 | 307 |
| 308 xd->mi = cm->mi_grid_visible + offset; | 308 xd->mi = cm->mi + offset; |
| 309 xd->mi[0] = &cm->mi[offset]; | 309 xd->mi[0].src_mi = &xd->mi[0]; // Point to self. |
| 310 xd->mi[0]->mbmi.sb_type = bsize; | 310 xd->mi[0].mbmi.sb_type = bsize; |
| 311 |
| 311 for (y = 0; y < y_mis; ++y) | 312 for (y = 0; y < y_mis; ++y) |
| 312 for (x = !y; x < x_mis; ++x) | 313 for (x = !y; x < x_mis; ++x) { |
| 313 xd->mi[y * cm->mi_stride + x] = xd->mi[0]; | 314 xd->mi[y * cm->mi_stride + x].src_mi = &xd->mi[0]; |
| 315 } |
| 314 | 316 |
| 315 set_skip_context(xd, mi_row, mi_col); | 317 set_skip_context(xd, mi_row, mi_col); |
| 316 | 318 |
| 317 // Distance of Mb to the various image edges. These are specified to 8th pel | 319 // Distance of Mb to the various image edges. These are specified to 8th pel |
| 318 // as they are always compared to values that are in 1/8th pel units | 320 // as they are always compared to values that are in 1/8th pel units |
| 319 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols); | 321 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols); |
| 320 | 322 |
| 321 vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col); | 323 vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col); |
| 322 return &xd->mi[0]->mbmi; | 324 return &xd->mi[0].mbmi; |
| 323 } | 325 } |
| 324 | 326 |
| 325 static void set_ref(VP9_COMMON *const cm, MACROBLOCKD *const xd, | 327 static void set_ref(VP9_COMMON *const cm, MACROBLOCKD *const xd, |
| 326 int idx, int mi_row, int mi_col) { | 328 int idx, int mi_row, int mi_col) { |
| 327 MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi; | 329 MB_MODE_INFO *const mbmi = &xd->mi[0].src_mi->mbmi; |
| 328 RefBuffer *ref_buffer = &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME]; | 330 RefBuffer *ref_buffer = &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME]; |
| 329 xd->block_refs[idx] = ref_buffer; | 331 xd->block_refs[idx] = ref_buffer; |
| 330 if (!vp9_is_valid_scale(&ref_buffer->sf)) | 332 if (!vp9_is_valid_scale(&ref_buffer->sf)) |
| 331 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, | 333 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 332 "Invalid scale factors"); | 334 "Invalid scale factors"); |
| 333 if (ref_buffer->buf->corrupted) | 335 if (ref_buffer->buf->corrupted) |
| 334 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, | 336 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 335 "Block reference is corrupt"); | 337 "Block reference is corrupt"); |
| 336 vp9_setup_pre_planes(xd, idx, ref_buffer->buf, mi_row, mi_col, | 338 vp9_setup_pre_planes(xd, idx, ref_buffer->buf, mi_row, mi_col, |
| 337 &ref_buffer->sf); | 339 &ref_buffer->sf); |
| (...skipping 323 matching lines...) |
| 661 cm->subsampling_x, cm->subsampling_y, | 663 cm->subsampling_x, cm->subsampling_y, |
| 662 #if CONFIG_VP9_HIGHBITDEPTH | 664 #if CONFIG_VP9_HIGHBITDEPTH |
| 663 cm->use_highbitdepth, | 665 cm->use_highbitdepth, |
| 664 #endif | 666 #endif |
| 665 VP9_DEC_BORDER_IN_PIXELS, | 667 VP9_DEC_BORDER_IN_PIXELS, |
| 666 &cm->frame_bufs[cm->new_fb_idx].raw_frame_buffer, cm->get_fb_cb, | 668 &cm->frame_bufs[cm->new_fb_idx].raw_frame_buffer, cm->get_fb_cb, |
| 667 cm->cb_priv)) { | 669 cm->cb_priv)) { |
| 668 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR, | 670 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR, |
| 669 "Failed to allocate frame buffer"); | 671 "Failed to allocate frame buffer"); |
| 670 } | 672 } |
| 673 cm->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth; |
| 674 } |
| 675 |
| 676 static INLINE int valid_ref_frame_img_fmt(vpx_bit_depth_t ref_bit_depth, |
| 677 int ref_xss, int ref_yss, |
| 678 vpx_bit_depth_t this_bit_depth, |
| 679 int this_xss, int this_yss) { |
| 680 return ref_bit_depth == this_bit_depth && ref_xss == this_xss && |
| 681 ref_yss == this_yss; |
| 671 } | 682 } |
| 672 | 683 |
| 673 static void setup_frame_size_with_refs(VP9_COMMON *cm, | 684 static void setup_frame_size_with_refs(VP9_COMMON *cm, |
| 674 struct vp9_read_bit_buffer *rb) { | 685 struct vp9_read_bit_buffer *rb) { |
| 675 int width, height; | 686 int width, height; |
| 676 int found = 0, i; | 687 int found = 0, i; |
| 677 int has_valid_ref_frame = 0; | 688 int has_valid_ref_frame = 0; |
| 678 for (i = 0; i < REFS_PER_FRAME; ++i) { | 689 for (i = 0; i < REFS_PER_FRAME; ++i) { |
| 679 if (vp9_rb_read_bit(rb)) { | 690 if (vp9_rb_read_bit(rb)) { |
| 680 YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf; | 691 YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf; |
| (...skipping 19 matching lines...) |
| 700 // has valid dimensions. | 711 // has valid dimensions. |
| 701 for (i = 0; i < REFS_PER_FRAME; ++i) { | 712 for (i = 0; i < REFS_PER_FRAME; ++i) { |
| 702 RefBuffer *const ref_frame = &cm->frame_refs[i]; | 713 RefBuffer *const ref_frame = &cm->frame_refs[i]; |
| 703 has_valid_ref_frame |= valid_ref_frame_size(ref_frame->buf->y_crop_width, | 714 has_valid_ref_frame |= valid_ref_frame_size(ref_frame->buf->y_crop_width, |
| 704 ref_frame->buf->y_crop_height, | 715 ref_frame->buf->y_crop_height, |
| 705 width, height); | 716 width, height); |
| 706 } | 717 } |
| 707 if (!has_valid_ref_frame) | 718 if (!has_valid_ref_frame) |
| 708 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, | 719 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 709 "Referenced frame has invalid size"); | 720 "Referenced frame has invalid size"); |
| 721 for (i = 0; i < REFS_PER_FRAME; ++i) { |
| 722 RefBuffer *const ref_frame = &cm->frame_refs[i]; |
| 723 if (!valid_ref_frame_img_fmt( |
| 724 ref_frame->buf->bit_depth, |
| 725 ref_frame->buf->uv_crop_width < ref_frame->buf->y_crop_width, |
| 726 ref_frame->buf->uv_crop_height < ref_frame->buf->y_crop_height, |
| 727 cm->bit_depth, |
| 728 cm->subsampling_x, |
| 729 cm->subsampling_y)) |
| 730 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 731 "Referenced frame has incompatible color space"); |
| 732 } |
| 710 | 733 |
| 711 resize_context_buffers(cm, width, height); | 734 resize_context_buffers(cm, width, height); |
| 712 setup_display_size(cm, rb); | 735 setup_display_size(cm, rb); |
| 713 | 736 |
| 714 if (vp9_realloc_frame_buffer( | 737 if (vp9_realloc_frame_buffer( |
| 715 get_frame_new_buffer(cm), cm->width, cm->height, | 738 get_frame_new_buffer(cm), cm->width, cm->height, |
| 716 cm->subsampling_x, cm->subsampling_y, | 739 cm->subsampling_x, cm->subsampling_y, |
| 717 #if CONFIG_VP9_HIGHBITDEPTH | 740 #if CONFIG_VP9_HIGHBITDEPTH |
| 718 cm->use_highbitdepth, | 741 cm->use_highbitdepth, |
| 719 #endif | 742 #endif |
| 720 VP9_DEC_BORDER_IN_PIXELS, | 743 VP9_DEC_BORDER_IN_PIXELS, |
| 721 &cm->frame_bufs[cm->new_fb_idx].raw_frame_buffer, cm->get_fb_cb, | 744 &cm->frame_bufs[cm->new_fb_idx].raw_frame_buffer, cm->get_fb_cb, |
| 722 cm->cb_priv)) { | 745 cm->cb_priv)) { |
| 723 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR, | 746 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR, |
| 724 "Failed to allocate frame buffer"); | 747 "Failed to allocate frame buffer"); |
| 725 } | 748 } |
| 749 cm->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth; |
| 726 } | 750 } |
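The function above now records the decoded frame's bit depth on its frame buffer and, for each reference, checks format compatibility via valid_ref_frame_img_fmt(), deriving the reference's subsampling flags from its crop sizes (a chroma plane narrower or shorter than luma means subsampling in that direction). A small self-contained check of that comparison, using assumed example dimensions (not values from this diff):

    #include <assert.h>

    int main(void) {
      /* Assumed example: a 1920x1080 reference stored as 4:2:0. */
      const int y_crop_width = 1920, y_crop_height = 1080;
      const int uv_crop_width = 960, uv_crop_height = 540;

      /* Same comparisons the decoder loop passes to valid_ref_frame_img_fmt(): */
      const int ref_xss = uv_crop_width < y_crop_width;
      const int ref_yss = uv_crop_height < y_crop_height;

      assert(ref_xss == 1 && ref_yss == 1);  /* matches subsampling_x/_y == 1 */
      return 0;
    }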
| 727 | 751 |
| 728 static void setup_tile_info(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) { | 752 static void setup_tile_info(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) { |
| 729 int min_log2_tile_cols, max_log2_tile_cols, max_ones; | 753 int min_log2_tile_cols, max_log2_tile_cols, max_ones; |
| 730 vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols); | 754 vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols); |
| 731 | 755 |
| 732 // columns | 756 // columns |
| 733 max_ones = max_log2_tile_cols - min_log2_tile_cols; | 757 max_ones = max_log2_tile_cols - min_log2_tile_cols; |
| 734 cm->log2_tile_cols = min_log2_tile_cols; | 758 cm->log2_tile_cols = min_log2_tile_cols; |
| 735 while (max_ones-- && vp9_rb_read_bit(rb)) | 759 while (max_ones-- && vp9_rb_read_bit(rb)) |
| (...skipping 195 matching lines...) |
| 931 lf_data->stop = cm->mi_rows; | 955 lf_data->stop = cm->mi_rows; |
| 932 winterface->execute(&pbi->lf_worker); | 956 winterface->execute(&pbi->lf_worker); |
| 933 } | 957 } |
| 934 | 958 |
| 935 // Get last tile data. | 959 // Get last tile data. |
| 936 tile_data = pbi->tile_data + tile_cols * tile_rows - 1; | 960 tile_data = pbi->tile_data + tile_cols * tile_rows - 1; |
| 937 | 961 |
| 938 return vp9_reader_find_end(&tile_data->bit_reader); | 962 return vp9_reader_find_end(&tile_data->bit_reader); |
| 939 } | 963 } |
| 940 | 964 |
| 941 static int tile_worker_hook(void *arg1, void *arg2) { | 965 static int tile_worker_hook(TileWorkerData *const tile_data, |
| 942 TileWorkerData *const tile_data = (TileWorkerData*)arg1; | 966 const TileInfo *const tile) { |
| 943 const TileInfo *const tile = (TileInfo*)arg2; | |
| 944 int mi_row, mi_col; | 967 int mi_row, mi_col; |
| 945 | 968 |
| 946 for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end; | 969 for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end; |
| 947 mi_row += MI_BLOCK_SIZE) { | 970 mi_row += MI_BLOCK_SIZE) { |
| 948 vp9_zero(tile_data->xd.left_context); | 971 vp9_zero(tile_data->xd.left_context); |
| 949 vp9_zero(tile_data->xd.left_seg_context); | 972 vp9_zero(tile_data->xd.left_seg_context); |
| 950 for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end; | 973 for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end; |
| 951 mi_col += MI_BLOCK_SIZE) { | 974 mi_col += MI_BLOCK_SIZE) { |
| 952 decode_partition(tile_data->cm, &tile_data->xd, tile, | 975 decode_partition(tile_data->cm, &tile_data->xd, tile, |
| 953 mi_row, mi_col, &tile_data->bit_reader, BLOCK_64X64); | 976 mi_row, mi_col, &tile_data->bit_reader, BLOCK_64X64); |
| (...skipping 240 matching lines...) |
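On the tile_worker_hook() signature change above, where the generic void pointers become concrete argument types: assuming the worker interface still declares its hook with two void * parameters (its declaration is not part of this diff), the call site would bridge the two signatures with a cast when installing the hook. A self-contained sketch of that pattern, using hypothetical stand-in types:

    /* Hypothetical stand-ins; only the casting pattern is the point. */
    typedef int (*GenericHook)(void *arg1, void *arg2);
    typedef struct { int rows_done; } WorkData;
    typedef struct { int start, stop; } WorkInfo;

    static int typed_hook(WorkData *data, const WorkInfo *info) {
      data->rows_done = info->stop - info->start;
      return 1;
    }

    int main(void) {
      WorkData data = { 0 };
      WorkInfo info = { 2, 10 };
      GenericHook hook = (GenericHook)typed_hook;  /* assumed bridge at the
                                                      worker call site */
      (void)hook;  /* calling through the generic type relies on the platform
                      ABI treating object pointers uniformly */
      return typed_hook(&data, &info) ? 0 : 1;     /* direct call for sketch */
    }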
| 1194 | 1217 |
| 1195 read_bitdepth_colorspace_sampling(cm, rb); | 1218 read_bitdepth_colorspace_sampling(cm, rb); |
| 1196 pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1; | 1219 pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1; |
| 1197 | 1220 |
| 1198 for (i = 0; i < REFS_PER_FRAME; ++i) { | 1221 for (i = 0; i < REFS_PER_FRAME; ++i) { |
| 1199 cm->frame_refs[i].idx = -1; | 1222 cm->frame_refs[i].idx = -1; |
| 1200 cm->frame_refs[i].buf = NULL; | 1223 cm->frame_refs[i].buf = NULL; |
| 1201 } | 1224 } |
| 1202 | 1225 |
| 1203 setup_frame_size(cm, rb); | 1226 setup_frame_size(cm, rb); |
| 1227 pbi->need_resync = 0; |
| 1204 } else { | 1228 } else { |
| 1205 cm->intra_only = cm->show_frame ? 0 : vp9_rb_read_bit(rb); | 1229 cm->intra_only = cm->show_frame ? 0 : vp9_rb_read_bit(rb); |
| 1206 | 1230 |
| 1207 cm->reset_frame_context = cm->error_resilient_mode ? | 1231 cm->reset_frame_context = cm->error_resilient_mode ? |
| 1208 0 : vp9_rb_read_literal(rb, 2); | 1232 0 : vp9_rb_read_literal(rb, 2); |
| 1209 | 1233 |
| 1210 if (cm->intra_only) { | 1234 if (cm->intra_only) { |
| 1211 if (!vp9_read_sync_code(rb)) | 1235 if (!vp9_read_sync_code(rb)) |
| 1212 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, | 1236 vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1213 "Invalid frame sync code"); | 1237 "Invalid frame sync code"); |
| 1214 if (cm->profile > PROFILE_0) { | 1238 if (cm->profile > PROFILE_0) { |
| 1215 read_bitdepth_colorspace_sampling(cm, rb); | 1239 read_bitdepth_colorspace_sampling(cm, rb); |
| 1216 } else { | 1240 } else { |
| 1217 // NOTE: The intra-only frame header does not include the specification | 1241 // NOTE: The intra-only frame header does not include the specification |
| 1218 // of either the color format or color sub-sampling in profile 0. VP9 | 1242 // of either the color format or color sub-sampling in profile 0. VP9 |
| 1219 // specifies that the default color space should be YUV 4:2:0 in this | 1243 // specifies that the default color space should be YUV 4:2:0 in this |
| 1220 // case (normative). | 1244 // case (normative). |
| 1221 cm->color_space = BT_601; | 1245 cm->color_space = BT_601; |
| 1222 cm->subsampling_y = cm->subsampling_x = 1; | 1246 cm->subsampling_y = cm->subsampling_x = 1; |
| 1223 } | 1247 } |
| 1224 | 1248 |
| 1225 pbi->refresh_frame_flags = vp9_rb_read_literal(rb, REF_FRAMES); | 1249 pbi->refresh_frame_flags = vp9_rb_read_literal(rb, REF_FRAMES); |
| 1226 setup_frame_size(cm, rb); | 1250 setup_frame_size(cm, rb); |
| 1251 pbi->need_resync = 0; |
| 1227 } else { | 1252 } else { |
| 1228 pbi->refresh_frame_flags = vp9_rb_read_literal(rb, REF_FRAMES); | 1253 pbi->refresh_frame_flags = vp9_rb_read_literal(rb, REF_FRAMES); |
| 1229 for (i = 0; i < REFS_PER_FRAME; ++i) { | 1254 for (i = 0; i < REFS_PER_FRAME; ++i) { |
| 1230 const int ref = vp9_rb_read_literal(rb, REF_FRAMES_LOG2); | 1255 const int ref = vp9_rb_read_literal(rb, REF_FRAMES_LOG2); |
| 1231 const int idx = cm->ref_frame_map[ref]; | 1256 const int idx = cm->ref_frame_map[ref]; |
| 1232 RefBuffer *const ref_frame = &cm->frame_refs[i]; | 1257 RefBuffer *const ref_frame = &cm->frame_refs[i]; |
| 1233 ref_frame->idx = idx; | 1258 ref_frame->idx = idx; |
| 1234 ref_frame->buf = &cm->frame_bufs[idx].buf; | 1259 ref_frame->buf = &cm->frame_bufs[idx].buf; |
| 1235 cm->ref_frame_sign_bias[LAST_FRAME + i] = vp9_rb_read_bit(rb); | 1260 cm->ref_frame_sign_bias[LAST_FRAME + i] = vp9_rb_read_bit(rb); |
| 1236 } | 1261 } |
| 1237 | 1262 |
| 1238 setup_frame_size_with_refs(cm, rb); | 1263 setup_frame_size_with_refs(cm, rb); |
| 1239 | 1264 |
| 1240 cm->allow_high_precision_mv = vp9_rb_read_bit(rb); | 1265 cm->allow_high_precision_mv = vp9_rb_read_bit(rb); |
| 1241 cm->interp_filter = read_interp_filter(rb); | 1266 cm->interp_filter = read_interp_filter(rb); |
| 1242 | 1267 |
| 1243 for (i = 0; i < REFS_PER_FRAME; ++i) { | 1268 for (i = 0; i < REFS_PER_FRAME; ++i) { |
| 1244 RefBuffer *const ref_buf = &cm->frame_refs[i]; | 1269 RefBuffer *const ref_buf = &cm->frame_refs[i]; |
| 1270 #if CONFIG_VP9_HIGHBITDEPTH |
| 1271 vp9_setup_scale_factors_for_frame(&ref_buf->sf, |
| 1272 ref_buf->buf->y_crop_width, |
| 1273 ref_buf->buf->y_crop_height, |
| 1274 cm->width, cm->height, |
| 1275 cm->use_highbitdepth); |
| 1276 #else |
| 1245 vp9_setup_scale_factors_for_frame(&ref_buf->sf, | 1277 vp9_setup_scale_factors_for_frame(&ref_buf->sf, |
| 1246 ref_buf->buf->y_crop_width, | 1278 ref_buf->buf->y_crop_width, |
| 1247 ref_buf->buf->y_crop_height, | 1279 ref_buf->buf->y_crop_height, |
| 1248 cm->width, cm->height); | 1280 cm->width, cm->height); |
| 1281 #endif |
| 1249 if (vp9_is_scaled(&ref_buf->sf)) | 1282 if (vp9_is_scaled(&ref_buf->sf)) |
| 1250 vp9_extend_frame_borders(ref_buf->buf); | 1283 vp9_extend_frame_borders(ref_buf->buf); |
| 1251 } | 1284 } |
| 1252 } | 1285 } |
| 1253 } | 1286 } |
| 1254 | 1287 |
| 1288 if (pbi->need_resync) { |
| 1289 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 1290 "Keyframe / intra-only frame required to reset decoder" |
| 1291 " state"); |
| 1292 } |
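The new pbi->need_resync handling above refuses to decode an inter frame until a key frame or intra-only frame has established decoder state. A minimal sketch of the assumed flag lifecycle (initialization happens outside this diff; only the two clears are visible above):

    /* Sketch; the field name follows the hunk above, the rest is assumed. */
    typedef struct {
      int need_resync;  /* 1 until a key frame / intra-only frame is decoded */
    } DecoderStateSketch;

    static void on_decoder_create(DecoderStateSketch *pbi) {
      pbi->need_resync = 1;               /* assumed initial value */
    }
    static void on_keyframe_or_intra_only(DecoderStateSketch *pbi) {
      pbi->need_resync = 0;               /* mirrors the two clears above */
    }
    static int may_decode_inter_frame(const DecoderStateSketch *pbi) {
      return !pbi->need_resync;           /* else: VPX_CODEC_CORRUPT_FRAME */
    }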
| 1293 |
| 1255 if (!cm->error_resilient_mode) { | 1294 if (!cm->error_resilient_mode) { |
| 1256 cm->refresh_frame_context = vp9_rb_read_bit(rb); | 1295 cm->refresh_frame_context = vp9_rb_read_bit(rb); |
| 1257 cm->frame_parallel_decoding_mode = vp9_rb_read_bit(rb); | 1296 cm->frame_parallel_decoding_mode = vp9_rb_read_bit(rb); |
| 1258 } else { | 1297 } else { |
| 1259 cm->refresh_frame_context = 0; | 1298 cm->refresh_frame_context = 0; |
| 1260 cm->frame_parallel_decoding_mode = 1; | 1299 cm->frame_parallel_decoding_mode = 1; |
| 1261 } | 1300 } |
| 1262 | 1301 |
| 1263 // This flag will be overridden by the call to vp9_setup_past_independence | 1302 // This flag will be overridden by the call to vp9_setup_past_independence |
| 1264 // below, forcing the use of context 0 for those frame types. | 1303 // below, forcing the use of context 0 for those frame types. |
| (...skipping 65 matching lines...) |
| 1330 read_mv_probs(nmvc, cm->allow_high_precision_mv, &r); | 1369 read_mv_probs(nmvc, cm->allow_high_precision_mv, &r); |
| 1331 } | 1370 } |
| 1332 | 1371 |
| 1333 return vp9_reader_has_error(&r); | 1372 return vp9_reader_has_error(&r); |
| 1334 } | 1373 } |
| 1335 | 1374 |
| 1336 void vp9_init_dequantizer(VP9_COMMON *cm) { | 1375 void vp9_init_dequantizer(VP9_COMMON *cm) { |
| 1337 int q; | 1376 int q; |
| 1338 | 1377 |
| 1339 for (q = 0; q < QINDEX_RANGE; q++) { | 1378 for (q = 0; q < QINDEX_RANGE; q++) { |
| 1340 cm->y_dequant[q][0] = vp9_dc_quant(q, cm->y_dc_delta_q); | 1379 cm->y_dequant[q][0] = vp9_dc_quant(q, cm->y_dc_delta_q, cm->bit_depth); |
| 1341 cm->y_dequant[q][1] = vp9_ac_quant(q, 0); | 1380 cm->y_dequant[q][1] = vp9_ac_quant(q, 0, cm->bit_depth); |
| 1342 | 1381 |
| 1343 cm->uv_dequant[q][0] = vp9_dc_quant(q, cm->uv_dc_delta_q); | 1382 cm->uv_dequant[q][0] = vp9_dc_quant(q, cm->uv_dc_delta_q, cm->bit_depth); |
| 1344 cm->uv_dequant[q][1] = vp9_ac_quant(q, cm->uv_ac_delta_q); | 1383 cm->uv_dequant[q][1] = vp9_ac_quant(q, cm->uv_ac_delta_q, cm->bit_depth); |
| 1345 } | 1384 } |
| 1346 } | 1385 } |
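On the extra cm->bit_depth argument to vp9_dc_quant()/vp9_ac_quant() above: presumably the helpers now dispatch to a per-bit-depth quantizer table so that 10/12-bit streams dequantize over their wider range. A sketch of that dispatch shape with placeholder tables and values (illustration only; the real lookup tables in vp9_quant_common.c are much larger):

    #include <stdint.h>

    typedef enum { SKETCH_8_BIT, SKETCH_10_BIT, SKETCH_12_BIT } sketch_depth_t;

    /* Placeholder tables, clearly not the real quantizer values. */
    static const int16_t dc_q_8[4]  = { 4, 8, 8, 9 };
    static const int16_t dc_q_10[4] = { 4, 9, 11, 13 };
    static const int16_t dc_q_12[4] = { 4, 12, 18, 25 };

    static int16_t dc_quant_sketch(int qindex, int delta,
                                   sketch_depth_t bit_depth) {
      int q = qindex + delta;
      if (q < 0) q = 0;
      if (q > 3) q = 3;                  /* real code clamps q into [0, MAXQ] */
      switch (bit_depth) {
        case SKETCH_10_BIT: return dc_q_10[q];
        case SKETCH_12_BIT: return dc_q_12[q];
        default:            return dc_q_8[q];
      }
    }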
| 1347 | 1386 |
| 1348 #ifdef NDEBUG | 1387 #ifdef NDEBUG |
| 1349 #define debug_check_frame_counts(cm) (void)0 | 1388 #define debug_check_frame_counts(cm) (void)0 |
| 1350 #else // !NDEBUG | 1389 #else // !NDEBUG |
| 1351 // Counts should only be incremented when frame_parallel_decoding_mode and | 1390 // Counts should only be incremented when frame_parallel_decoding_mode and |
| 1352 // error_resilient_mode are disabled. | 1391 // error_resilient_mode are disabled. |
| 1353 static void debug_check_frame_counts(const VP9_COMMON *const cm) { | 1392 static void debug_check_frame_counts(const VP9_COMMON *const cm) { |
| 1354 FRAME_COUNTS zero_counts; | 1393 FRAME_COUNTS zero_counts; |
| (...skipping 119 matching lines...) |
| 1474 debug_check_frame_counts(cm); | 1513 debug_check_frame_counts(cm); |
| 1475 } | 1514 } |
| 1476 } else { | 1515 } else { |
| 1477 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, | 1516 vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 1478 "Decode failed. Frame data is corrupted."); | 1517 "Decode failed. Frame data is corrupted."); |
| 1479 } | 1518 } |
| 1480 | 1519 |
| 1481 if (cm->refresh_frame_context) | 1520 if (cm->refresh_frame_context) |
| 1482 cm->frame_contexts[cm->frame_context_idx] = cm->fc; | 1521 cm->frame_contexts[cm->frame_context_idx] = cm->fc; |
| 1483 } | 1522 } |