Chromium Code Reviews

Diff: source/libvpx/vp9/encoder/vp9_tokenize.c

Issue 181493009: libvpx: Pull from upstream (Closed)
Base URL: svn://svn.chromium.org/chrome/trunk/deps/third_party/libvpx/
Patch Set: Created 6 years, 9 months ago
 /*
  * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */


 #include <math.h>
 #include <stdio.h>
 #include <string.h>
 #include <assert.h>
 #include "vp9/encoder/vp9_onyx_int.h"
 #include "vp9/encoder/vp9_tokenize.h"
 #include "vpx_mem/vpx_mem.h"

 #include "vp9/common/vp9_pred_common.h"
 #include "vp9/common/vp9_seg_common.h"
 #include "vp9/common/vp9_entropy.h"

 static TOKENVALUE dct_value_tokens[DCT_MAX_VALUE * 2];
 const TOKENVALUE *vp9_dct_value_tokens_ptr;
-static int dct_value_cost[DCT_MAX_VALUE * 2];
-const int *vp9_dct_value_cost_ptr;
+static int16_t dct_value_cost[DCT_MAX_VALUE * 2];
+const int16_t *vp9_dct_value_cost_ptr;

 // Array indices are identical to previously-existing CONTEXT_NODE indices
 const vp9_tree_index vp9_coef_tree[TREE_SIZE(ENTROPY_TOKENS)] = {
   -EOB_TOKEN, 2,              // 0 = EOB
   -ZERO_TOKEN, 4,             // 1 = ZERO
   -ONE_TOKEN, 6,              // 2 = ONE
   8, 12,                      // 3 = LOW_VAL
   -TWO_TOKEN, 10,             // 4 = TWO
   -THREE_TOKEN, -FOUR_TOKEN,  // 5 = THREE
   14, 16,                     // 6 = HIGH_LOW

(...skipping 154 matching lines...)

                                uint8_t token,
                                uint8_t skip_eob_node,
                                unsigned int *counts) {
   (*t)->token = token;
   (*t)->context_tree = context_tree;
   (*t)->skip_eob_node = skip_eob_node;
   (*t)++;
   ++counts[token];
 }

+static INLINE int get_tx_eob(const struct segmentation *seg, int segment_id,
+                             TX_SIZE tx_size) {
+  const int eob_max = 16 << (tx_size << 1);
+  return vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP) ? 0 : eob_max;
+}
+
 static void tokenize_b(int plane, int block, BLOCK_SIZE plane_bsize,
                        TX_SIZE tx_size, void *arg) {
   struct tokenize_b_args* const args = arg;
   VP9_COMP *cpi = args->cpi;
   MACROBLOCKD *xd = args->xd;
   TOKENEXTRA **tp = args->tp;
   uint8_t *token_cache = args->token_cache;
   struct macroblock_plane *p = &cpi->mb.plane[plane];
   struct macroblockd_plane *pd = &xd->plane[plane];
   MB_MODE_INFO *mbmi = &xd->mi_8x8[0]->mbmi;
   int pt;  /* near block/prev token context index */
   int c;
   TOKENEXTRA *t = *tp;  /* store tokens starting here */
   int eob = p->eobs[block];
   const PLANE_TYPE type = pd->plane_type;
-  const int16_t *qcoeff_ptr = BLOCK_OFFSET(p->qcoeff, block);
+  const int16_t *qcoeff = BLOCK_OFFSET(p->qcoeff, block);
   const int segment_id = mbmi->segment_id;
   const int16_t *scan, *nb;
   const scan_order *so;
   const int ref = is_inter_block(mbmi);
   unsigned int (*const counts)[COEFF_CONTEXTS][ENTROPY_TOKENS] =
       cpi->coef_counts[tx_size][type][ref];
   vp9_prob (*const coef_probs)[COEFF_CONTEXTS][UNCONSTRAINED_NODES] =
       cpi->common.fc.coef_probs[tx_size][type][ref];
   unsigned int (*const eob_branch)[COEFF_CONTEXTS] =
       cpi->common.counts.eob_branch[tx_size][type][ref];

   const uint8_t *const band = get_band_translate(tx_size);
   const int seg_eob = get_tx_eob(&cpi->common.seg, segment_id, tx_size);

   int aoff, loff;
   txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &aoff, &loff);

   pt = get_entropy_context(tx_size, pd->above_context + aoff,
                            pd->left_context + loff);
   so = get_scan(xd, tx_size, type, block);
   scan = so->scan;
   nb = so->neighbors;
   c = 0;
   while (c < eob) {
     int v = 0;
     int skip_eob = 0;
-    v = qcoeff_ptr[scan[c]];
+    v = qcoeff[scan[c]];

     while (!v) {
       add_token_no_extra(&t, coef_probs[band[c]][pt], ZERO_TOKEN, skip_eob,
                          counts[band[c]][pt]);
       eob_branch[band[c]][pt] += !skip_eob;

       skip_eob = 1;
       token_cache[scan[c]] = 0;
       ++c;
       pt = get_coef_context(nb, token_cache, c);
-      v = qcoeff_ptr[scan[c]];
+      v = qcoeff[scan[c]];
     }

     add_token(&t, coef_probs[band[c]][pt],
               vp9_dct_value_tokens_ptr[v].extra,
-              vp9_dct_value_tokens_ptr[v].token, skip_eob,
+              (uint8_t)vp9_dct_value_tokens_ptr[v].token,
+              (uint8_t)skip_eob,
               counts[band[c]][pt]);
     eob_branch[band[c]][pt] += !skip_eob;

     token_cache[scan[c]] =
         vp9_pt_energy_class[vp9_dct_value_tokens_ptr[v].token];
     ++c;
     pt = get_coef_context(nb, token_cache, c);
   }
   if (c < seg_eob) {
     add_token_no_extra(&t, coef_probs[band[c]][pt], EOB_TOKEN, 0,

(...skipping 57 matching lines...)

     vp9_foreach_transformed_block(xd, bsize, tokenize_b, &arg);
   } else {
     vp9_foreach_transformed_block(xd, bsize, set_entropy_context_b, &arg);
     *t = t_backup;
   }
 }

 void vp9_tokenize_initialize() {
   fill_value_tokens();
 }
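
Note on the newly added get_tx_eob() helper: the expression 16 << (tx_size << 1) is the number of coefficients in a transform block, so seg_eob becomes that count unless the segment has SEG_LVL_SKIP active, in which case the end-of-block position is forced to 0. The standalone sketch below only reproduces that arithmetic; it is an illustration, not code from this patch, and it assumes the usual libvpx TX_SIZE values (TX_4X4 = 0 through TX_32X32 = 3). The name max_eob_for_tx is invented for the example.

#include <stdio.h>

/* Stand-in for libvpx's TX_SIZE enum; values assumed to match vp9_enums.h. */
typedef enum { TX_4X4 = 0, TX_8X8 = 1, TX_16X16 = 2, TX_32X32 = 3 } TX_SIZE;

/* Same arithmetic as the eob_max computation in get_tx_eob(), without the
 * segmentation lookup: the maximum end-of-block position equals the number
 * of coefficients in the transform block. */
static int max_eob_for_tx(TX_SIZE tx_size) {
  return 16 << (tx_size << 1);  /* 4x4 -> 16, 8x8 -> 64, 16x16 -> 256, 32x32 -> 1024 */
}

int main(void) {
  const TX_SIZE sizes[] = { TX_4X4, TX_8X8, TX_16X16, TX_32X32 };
  int i;
  for (i = 0; i < 4; ++i)
    printf("tx_size %d -> up to %d coefficients\n",
           (int)sizes[i], max_eob_for_tx(sizes[i]));
  return 0;
}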