Index: third_party/libwebp/enc/vp8l.c
diff --git a/third_party/libwebp/enc/vp8l.c b/third_party/libwebp/enc/vp8l.c
index db94e78a808e41496911718c60d31117db9d3752..c16e2560ec82059139bfdf7e64c93a8b3ab6a89b 100644
--- a/third_party/libwebp/enc/vp8l.c
+++ b/third_party/libwebp/enc/vp8l.c
@@ -126,54 +126,8 @@ static int AnalyzeAndCreatePalette(const WebPPicture* const pic,
                                     int low_effort,
                                     uint32_t palette[MAX_PALETTE_SIZE],
                                     int* const palette_size) {
-  int i, x, y, key;
-  int num_colors = 0;
-  uint8_t in_use[MAX_PALETTE_SIZE * 4] = { 0 };
-  uint32_t colors[MAX_PALETTE_SIZE * 4];
-  static const uint32_t kHashMul = 0x1e35a7bd;
-  const uint32_t* argb = pic->argb;
-  const int width = pic->width;
-  const int height = pic->height;
-  uint32_t last_pix = ~argb[0]; // so we're sure that last_pix != argb[0]
-
-  for (y = 0; y < height; ++y) {
-    for (x = 0; x < width; ++x) {
-      if (argb[x] == last_pix) {
-        continue;
-      }
-      last_pix = argb[x];
-      key = (kHashMul * last_pix) >> PALETTE_KEY_RIGHT_SHIFT;
-      while (1) {
-        if (!in_use[key]) {
-          colors[key] = last_pix;
-          in_use[key] = 1;
-          ++num_colors;
-          if (num_colors > MAX_PALETTE_SIZE) {
-            return 0;
-          }
-          break;
-        } else if (colors[key] == last_pix) {
-          // The color is already there.
-          break;
-        } else {
-          // Some other color sits there.
-          // Do linear conflict resolution.
-          ++key;
-          key &= (MAX_PALETTE_SIZE * 4 - 1); // key mask for 1K buffer.
-        }
-      }
-    }
-    argb += pic->argb_stride;
-  }
-
-  // TODO(skal): could we reuse in_use[] to speed up EncodePalette()?
-  num_colors = 0;
-  for (i = 0; i < (int)(sizeof(in_use) / sizeof(in_use[0])); ++i) {
-    if (in_use[i]) {
-      palette[num_colors] = colors[i];
-      ++num_colors;
-    }
-  }
+  const int num_colors = WebPGetColorPalette(pic, palette);
+  if (num_colors > MAX_PALETTE_SIZE) return 0;
   *palette_size = num_colors;
   qsort(palette, num_colors, sizeof(*palette), PaletteCompareColorsForQsort);
   if (!low_effort && PaletteHasNonMonotonousDeltas(palette, num_colors)) {
@@ -336,7 +290,7 @@ static int AnalyzeEntropy(const uint32_t* argb,
         }
       }
     }
-    free(histo);
+    WebPSafeFree(histo);
     return 1;
   } else {
     return 0;
@@ -761,6 +715,10 @@ static WebPEncodingError EncodeImageNoHuffman(VP8LBitWriter* const bw,
   }
 
   // Calculate backward references from ARGB image.
+  if (VP8LHashChainFill(hash_chain, quality, argb, width, height) == 0) {
+    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
+    goto Error;
+  }
   refs = VP8LGetBackwardReferences(width, height, argb, quality, 0, &cache_bits,
                                    hash_chain, refs_array);
   if (refs == NULL) {
@@ -824,7 +782,8 @@ static WebPEncodingError EncodeImageInternal(VP8LBitWriter* const bw,
                                              VP8LHashChain* const hash_chain,
                                              VP8LBackwardRefs refs_array[2],
                                              int width, int height, int quality,
-                                             int low_effort, int* cache_bits,
+                                             int low_effort,
+                                             int use_cache, int* cache_bits,
                                              int histogram_bits,
                                              size_t init_byte_position,
                                              int* const hdr_size,
@@ -856,10 +815,14 @@ static WebPEncodingError EncodeImageInternal(VP8LBitWriter* const bw,
     goto Error;
   }
 
-  *cache_bits = MAX_COLOR_CACHE_BITS;
+  *cache_bits = use_cache ? MAX_COLOR_CACHE_BITS : 0;
   // 'best_refs' is the reference to the best backward refs and points to one
   // of refs_array[0] or refs_array[1].
   // Calculate backward references from ARGB image.
+  if (VP8LHashChainFill(hash_chain, quality, argb, width, height) == 0) {
+    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
+    goto Error;
+  }
   best_refs = VP8LGetBackwardReferences(width, height, argb, quality,
                                         low_effort, cache_bits, hash_chain,
                                         refs_array);
@@ -1007,14 +970,19 @@ static void ApplySubtractGreen(VP8LEncoder* const enc, int width, int height,
 static WebPEncodingError ApplyPredictFilter(const VP8LEncoder* const enc,
                                             int width, int height,
                                             int quality, int low_effort,
+                                            int used_subtract_green,
                                             VP8LBitWriter* const bw) {
   const int pred_bits = enc->transform_bits_;
   const int transform_width = VP8LSubSampleSize(width, pred_bits);
   const int transform_height = VP8LSubSampleSize(height, pred_bits);
+  // we disable near-lossless quantization if palette is used.
+  const int near_lossless_strength = enc->use_palette_ ? 100
+                                   : enc->config_->near_lossless;
 
   VP8LResidualImage(width, height, pred_bits, low_effort, enc->argb_,
                     enc->argb_scratch_, enc->transform_data_,
-                    enc->config_->exact);
+                    near_lossless_strength, enc->config_->exact,
+                    used_subtract_green);
   VP8LPutBits(bw, TRANSFORM_PRESENT, 1);
   VP8LPutBits(bw, PREDICTOR_TRANSFORM, 2);
   assert(pred_bits >= 2);
@@ -1114,6 +1082,12 @@ static WebPEncodingError WriteImage(const WebPPicture* const pic,
 
 // -----------------------------------------------------------------------------
 
+static void ClearTransformBuffer(VP8LEncoder* const enc) {
+  WebPSafeFree(enc->transform_mem_);
+  enc->transform_mem_ = NULL;
+  enc->transform_mem_size_ = 0;
+}
+
 // Allocates the memory for argb (W x H) buffer, 2 rows of context for
 // prediction and transform data.
 // Flags influencing the memory allocated:
@@ -1122,43 +1096,48 @@ static WebPEncodingError WriteImage(const WebPPicture* const pic,
 static WebPEncodingError AllocateTransformBuffer(VP8LEncoder* const enc,
                                                  int width, int height) {
   WebPEncodingError err = VP8_ENC_OK;
-  if (enc->argb_ == NULL) {
-    const int tile_size = 1 << enc->transform_bits_;
-    const uint64_t image_size = width * height;
-    // Ensure enough size for tiles, as well as for two scanlines and two
-    // extra pixels for CopyImageWithPrediction.
-    const uint64_t argb_scratch_size =
-        enc->use_predict_ ? tile_size * width + width + 2 : 0;
-    const int transform_data_size =
-        (enc->use_predict_ || enc->use_cross_color_)
-            ? VP8LSubSampleSize(width, enc->transform_bits_) *
-                  VP8LSubSampleSize(height, enc->transform_bits_)
-            : 0;
-    const uint64_t total_size =
-        image_size + WEBP_ALIGN_CST +
-        argb_scratch_size + WEBP_ALIGN_CST +
-        (uint64_t)transform_data_size;
-    uint32_t* mem = (uint32_t*)WebPSafeMalloc(total_size, sizeof(*mem));
+  const uint64_t image_size = width * height;
+  // VP8LResidualImage needs room for 2 scanlines of uint32 pixels with an extra
+  // pixel in each, plus 2 regular scanlines of bytes.
+  // TODO(skal): Clean up by using arithmetic in bytes instead of words.
+  const uint64_t argb_scratch_size =
+      enc->use_predict_
+          ? (width + 1) * 2 +
+            (width * 2 + sizeof(uint32_t) - 1) / sizeof(uint32_t)
+          : 0;
+  const uint64_t transform_data_size =
+      (enc->use_predict_ || enc->use_cross_color_)
+          ? VP8LSubSampleSize(width, enc->transform_bits_) *
+                VP8LSubSampleSize(height, enc->transform_bits_)
+          : 0;
+  const uint64_t max_alignment_in_words =
+      (WEBP_ALIGN_CST + sizeof(uint32_t) - 1) / sizeof(uint32_t);
+  const uint64_t mem_size =
+      image_size + max_alignment_in_words +
+      argb_scratch_size + max_alignment_in_words +
+      transform_data_size;
+  uint32_t* mem = enc->transform_mem_;
+  if (mem == NULL || mem_size > enc->transform_mem_size_) {
+    ClearTransformBuffer(enc);
+    mem = (uint32_t*)WebPSafeMalloc(mem_size, sizeof(*mem));
     if (mem == NULL) {
       err = VP8_ENC_ERROR_OUT_OF_MEMORY;
       goto Error;
     }
-    enc->argb_ = mem;
-    mem = (uint32_t*)WEBP_ALIGN(mem + image_size);
-    enc->argb_scratch_ = mem;
-    mem = (uint32_t*)WEBP_ALIGN(mem + argb_scratch_size);
-    enc->transform_data_ = mem;
-    enc->current_width_ = width;
+    enc->transform_mem_ = mem;
+    enc->transform_mem_size_ = (size_t)mem_size;
   }
+  enc->argb_ = mem;
+  mem = (uint32_t*)WEBP_ALIGN(mem + image_size);
+  enc->argb_scratch_ = mem;
+  mem = (uint32_t*)WEBP_ALIGN(mem + argb_scratch_size);
+  enc->transform_data_ = mem;
+
+  enc->current_width_ = width;
  Error:
   return err;
 }
 
-static void ClearTransformBuffer(VP8LEncoder* const enc) {
-  WebPSafeFree(enc->argb_);
-  enc->argb_ = NULL;
-}
-
 static WebPEncodingError MakeInputImageCopy(VP8LEncoder* const enc) {
   WebPEncodingError err = VP8_ENC_OK;
   const WebPPicture* const picture = enc->pic_;
@@ -1178,8 +1157,35 @@ static WebPEncodingError MakeInputImageCopy(VP8LEncoder* const enc) {
 
 // -----------------------------------------------------------------------------
 
-static void MapToPalette(const uint32_t palette[], int num_colors,
+static int SearchColor(const uint32_t sorted[], uint32_t color, int hi) {
+  int low = 0;
+  if (sorted[low] == color) return low; // loop invariant: sorted[low] != color
+  while (1) {
+    const int mid = (low + hi) >> 1;
+    if (sorted[mid] == color) {
+      return mid;
+    } else if (sorted[mid] < color) {
+      low = mid;
+    } else {
+      hi = mid;
+    }
+  }
+}
+
+// Sort palette in increasing order and prepare an inverse mapping array.
+static void PrepareMapToPalette(const uint32_t palette[], int num_colors,
+                                uint32_t sorted[], int idx_map[]) {
+  int i;
+  memcpy(sorted, palette, num_colors * sizeof(*sorted));
+  qsort(sorted, num_colors, sizeof(*sorted), PaletteCompareColorsForQsort);
+  for (i = 0; i < num_colors; ++i) {
+    idx_map[SearchColor(sorted, palette[i], num_colors)] = i;
+  }
+}
+
+static void MapToPalette(const uint32_t sorted_palette[], int num_colors,
                          uint32_t* const last_pix, int* const last_idx,
+                         const int idx_map[],
                          const uint32_t* src, uint8_t* dst, int width) {
   int x;
   int prev_idx = *last_idx;
@@ -1187,14 +1193,8 @@ static void MapToPalette(const uint32_t palette[], int num_colors,
   for (x = 0; x < width; ++x) {
     const uint32_t pix = src[x];
     if (pix != prev_pix) {
-      int i;
-      for (i = 0; i < num_colors; ++i) {
-        if (pix == palette[i]) {
-          prev_idx = i;
-          prev_pix = pix;
-          break;
-        }
-      }
+      prev_idx = idx_map[SearchColor(sorted_palette, pix, num_colors)];
+      prev_pix = pix;
     }
     dst[x] = prev_idx;
   }
@@ -1241,11 +1241,16 @@ static WebPEncodingError ApplyPalette(const uint32_t* src, uint32_t src_stride,
     }
   } else {
     // Use 1 pixel cache for ARGB pixels.
-    uint32_t last_pix = palette[0];
-    int last_idx = 0;
+    uint32_t last_pix;
+    int last_idx;
+    uint32_t sorted[MAX_PALETTE_SIZE];
+    int idx_map[MAX_PALETTE_SIZE];
+    PrepareMapToPalette(palette, palette_size, sorted, idx_map);
+    last_pix = palette[0];
+    last_idx = 0;
     for (y = 0; y < height; ++y) {
-      MapToPalette(palette, palette_size, &last_pix, &last_idx,
-                   src, tmp_row, width);
+      MapToPalette(sorted, palette_size, &last_pix, &last_idx,
+                   idx_map, src, tmp_row, width);
       VP8LBundleColorMap(tmp_row, width, xbits, dst);
       src += src_stride;
       dst += dst_stride;
@@ -1378,7 +1383,7 @@ static void VP8LEncoderDelete(VP8LEncoder* enc) {
 
 WebPEncodingError VP8LEncodeStream(const WebPConfig* const config,
                                    const WebPPicture* const picture,
-                                   VP8LBitWriter* const bw) {
+                                   VP8LBitWriter* const bw, int use_cache) {
   WebPEncodingError err = VP8_ENC_OK;
   const int quality = (int)config->quality;
   const int low_effort = (config->method == 0);
@@ -1405,7 +1410,8 @@ WebPEncodingError VP8LEncodeStream(const WebPConfig* const config,
   }
 
   // Apply near-lossless preprocessing.
-  use_near_lossless = !enc->use_palette_ && (config->near_lossless < 100);
+  use_near_lossless =
+      (config->near_lossless < 100) && !enc->use_palette_ && !enc->use_predict_;
   if (use_near_lossless) {
     if (!VP8ApplyNearLossless(width, height, picture->argb,
                               config->near_lossless)) {
@@ -1457,7 +1463,7 @@ WebPEncodingError VP8LEncodeStream(const WebPConfig* const config,
 
   if (enc->use_predict_) {
     err = ApplyPredictFilter(enc, enc->current_width_, height, quality,
-                             low_effort, bw);
+                             low_effort, enc->use_subtract_green_, bw);
     if (err != VP8_ENC_OK) goto Error;
   }
 
@@ -1474,8 +1480,8 @@ WebPEncodingError VP8LEncodeStream(const WebPConfig* const config,
   // Encode and write the transformed image.
   err = EncodeImageInternal(bw, enc->argb_, &enc->hash_chain_, enc->refs_,
                             enc->current_width_, height, quality, low_effort,
-                            &enc->cache_bits_, enc->histo_bits_, byte_position,
-                            &hdr_size, &data_size);
+                            use_cache, &enc->cache_bits_, enc->histo_bits_,
+                            byte_position, &hdr_size, &data_size);
   if (err != VP8_ENC_OK) goto Error;
 
   if (picture->stats != NULL) {
@@ -1560,7 +1566,7 @@ int VP8LEncodeImage(const WebPConfig* const config,
   if (!WebPReportProgress(picture, 5, &percent)) goto UserAbort;
 
   // Encode main image stream.
-  err = VP8LEncodeStream(config, picture, &bw);
+  err = VP8LEncodeStream(config, picture, &bw, 1 /*use_cache*/);
   if (err != VP8_ENC_OK) goto Error;
 
   // TODO(skal): have a fine-grained progress report in VP8LEncodeStream().
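
Note on the MapToPalette change above: the old code scanned the palette linearly for every
pixel that missed the one-entry cache; the patch instead sorts a copy of the palette once
(PrepareMapToPalette) and binary-searches it (SearchColor), translating the hit back to the
original palette index through idx_map. The standalone sketch below illustrates the same
scheme; it is not part of the patch, and the 4-color palette, CompareColors (standing in for
libwebp's PaletteCompareColorsForQsort) and the other names are illustrative only.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Plain uint32 ordering, consistent with the '<' used by the binary search. */
static int CompareColors(const void* a, const void* b) {
  const uint32_t ca = *(const uint32_t*)a;
  const uint32_t cb = *(const uint32_t*)b;
  return (ca < cb) ? -1 : (ca > cb) ? 1 : 0;
}

/* Binary search over 'sorted'; assumes 'color' is present, which holds when the
   caller only looks up colors that came from the palette itself. */
static int FindColor(const uint32_t sorted[], uint32_t color, int hi) {
  int low = 0;
  if (sorted[low] == color) return low; /* below, sorted[low] < color always */
  while (1) {
    const int mid = (low + hi) >> 1;
    if (sorted[mid] == color) return mid;
    if (sorted[mid] < color) low = mid; else hi = mid;
  }
}

int main(void) {
  const uint32_t palette[4] = { 0xff0000ffu, 0xff00ff00u, 0xffff0000u, 0xff000000u };
  uint32_t sorted[4];
  int idx_map[4];
  int i;
  /* Sort a copy; for each sorted slot, remember the original palette index. */
  memcpy(sorted, palette, sizeof(sorted));
  qsort(sorted, 4, sizeof(*sorted), CompareColors);
  for (i = 0; i < 4; ++i) {
    idx_map[FindColor(sorted, palette[i], 4)] = i;
  }
  /* A pixel is now mapped to its original palette index in O(log n). */
  for (i = 0; i < 4; ++i) {
    const uint32_t pix = palette[i];
    printf("pixel %08" PRIx32 " -> palette index %d\n", pix,
           idx_map[FindColor(sorted, pix, 4)]);
  }
  return 0;
}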