Index: source/libvpx/vp9/encoder/vp9_aq_variance.c
===================================================================
--- source/libvpx/vp9/encoder/vp9_aq_variance.c (revision 292608)
+++ source/libvpx/vp9/encoder/vp9_aq_variance.c (working copy)
@@ -34,6 +34,9 @@
 #define SEGMENT_ID(i) segment_id[(i) - ENERGY_MIN]
 
 DECLARE_ALIGNED(16, static const uint8_t, vp9_64_zeros[64]) = {0};
+#if CONFIG_VP9_HIGHBITDEPTH
+DECLARE_ALIGNED(16, static const uint16_t, vp9_highbd_64_zeros[64]) = {0};
+#endif
 
 unsigned int vp9_vaq_segment_id(int energy) {
   ENERGY_IN_BOUNDS(energy);
@@ -126,14 +129,40 @@
     const int bw = 8 * num_8x8_blocks_wide_lookup[bs] - right_overflow;
     const int bh = 8 * num_8x8_blocks_high_lookup[bs] - bottom_overflow;
     int avg;
+#if CONFIG_VP9_HIGHBITDEPTH
+    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
+      highbd_variance(x->plane[0].src.buf, x->plane[0].src.stride,
+                      CONVERT_TO_BYTEPTR(vp9_highbd_64_zeros), 0, bw, bh,
+                      &sse, &avg);
+      sse >>= 2 * (xd->bd - 8);
+      avg >>= (xd->bd - 8);
+    } else {
+      variance(x->plane[0].src.buf, x->plane[0].src.stride,
+               vp9_64_zeros, 0, bw, bh, &sse, &avg);
+    }
+#else
     variance(x->plane[0].src.buf, x->plane[0].src.stride,
              vp9_64_zeros, 0, bw, bh, &sse, &avg);
+#endif  // CONFIG_VP9_HIGHBITDEPTH
     var = sse - (((int64_t)avg * avg) / (bw * bh));
     return (256 * var) / (bw * bh);
   } else {
+#if CONFIG_VP9_HIGHBITDEPTH
+    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
+      var = cpi->fn_ptr[bs].vf(x->plane[0].src.buf,
+                               x->plane[0].src.stride,
+                               CONVERT_TO_BYTEPTR(vp9_highbd_64_zeros),
+                               0, &sse);
+    } else {
+      var = cpi->fn_ptr[bs].vf(x->plane[0].src.buf,
+                               x->plane[0].src.stride,
+                               vp9_64_zeros, 0, &sse);
+    }
+#else
     var = cpi->fn_ptr[bs].vf(x->plane[0].src.buf,
                              x->plane[0].src.stride,
                              vp9_64_zeros, 0, &sse);
+#endif  // CONFIG_VP9_HIGHBITDEPTH
     return (256 * var) >> num_pels_log2_lookup[bs];
   }
 }
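
Note on the scaling in the high bit depth branch: samples in a bd-bit buffer span [0, 2^bd - 1], so relative to 8-bit data the accumulated sum grows by a factor of 2^(bd - 8) and the sum of squares by 2^(2 * (bd - 8)). Shifting avg right by (bd - 8) and sse right by 2 * (bd - 8) brings both back to the 8-bit scale before the existing variance formula is applied. A minimal standalone sketch of that normalization follows; the normalize_highbd() helper and the constant 10-bit block are illustrative only, not part of libvpx.

#include <stdint.h>
#include <stdio.h>

/* Illustrative helper mirroring the shifts in the patch: scale a sum of
 * squares and a sum of bd-bit samples back to the 8-bit range. */
static void normalize_highbd(unsigned int *sse, int *avg, int bd) {
  *sse >>= 2 * (bd - 8);
  *avg >>= (bd - 8);
}

int main(void) {
  /* Hypothetical 8x8 block of 10-bit samples, all equal to 512 (128 << 2). */
  const int bd = 10, bw = 8, bh = 8;
  unsigned int sse = 0;
  int avg = 0;  /* like the patch's avg, this holds the raw sum of samples */
  for (int i = 0; i < bw * bh; ++i) {
    const uint16_t px = 512;
    avg += px;
    sse += (unsigned int)px * px;
  }
  normalize_highbd(&sse, &avg, bd);
  /* Matches the 8-bit equivalents: sse == 64 * 128 * 128, avg == 64 * 128. */
  printf("sse=%u avg=%d\n", sse, avg);
  return 0;
}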