| OLD | NEW |
| (Empty) |
| 1 /* | |
| 2 * Copyright (C) 2010 Google Inc. All rights reserved. | |
| 3 * | |
| 4 * Redistribution and use in source and binary forms, with or without | |
| 5 * modification, are permitted provided that the following conditions | |
| 6 * are met: | |
| 7 * | |
| 8 * 1. Redistributions of source code must retain the above copyright | |
| 9 * notice, this list of conditions and the following disclaimer. | |
| 10 * 2. Redistributions in binary form must reproduce the above copyright | |
| 11 * notice, this list of conditions and the following disclaimer in the | |
| 12 * documentation and/or other materials provided with the distribution. | |
| 13 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of | |
| 14 * its contributors may be used to endorse or promote products derived | |
| 15 * from this software without specific prior written permission. | |
| 16 * | |
| 17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY | |
| 18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED | |
| 19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | |
| 20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY | |
| 21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | |
| 22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | |
| 23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND | |
| 24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
| 25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF | |
| 26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
| 27 */ | |
| 28 | |
| 29 #include "platform/image-decoders/webp/WEBPImageDecoder.h" | |
| 30 | |
| 31 #if CPU(BIG_ENDIAN) || CPU(MIDDLE_ENDIAN) | |
| 32 #error Blink assumes a little-endian target. | |
| 33 #endif | |
| 34 | |
// Maps the desired alpha handling to libwebp's output colorspace. The byte
// order matches Skia's native 32-bit pixel layout: RGBA when SK_B32_SHIFT is
// set (Android), BGRA otherwise. The lowercase-"a" modes (MODE_rgbA /
// MODE_bgrA) ask libwebp to emit premultiplied alpha.
#if SK_B32_SHIFT // Output little-endian RGBA pixels (Android).
inline WEBP_CSP_MODE outputMode(bool hasAlpha) {
  return hasAlpha ? MODE_rgbA : MODE_RGBA;
}
#else // Output little-endian BGRA pixels.
inline WEBP_CSP_MODE outputMode(bool hasAlpha) {
  return hasAlpha ? MODE_bgrA : MODE_BGRA;
}
#endif
| 44 | |
| 45 namespace { | |
| 46 | |
| 47 // Returns two point ranges (<left, width> pairs) at row |canvasY| which belong | |
| 48 // to |src| but not |dst|. A range is empty if its width is 0. | |
| 49 inline void findBlendRangeAtRow(const blink::IntRect& src, | |
| 50 const blink::IntRect& dst, | |
| 51 int canvasY, | |
| 52 int& left1, | |
| 53 int& width1, | |
| 54 int& left2, | |
| 55 int& width2) { | |
| 56 SECURITY_DCHECK(canvasY >= src.Y() && canvasY < src.MaxY()); | |
| 57 left1 = -1; | |
| 58 width1 = 0; | |
| 59 left2 = -1; | |
| 60 width2 = 0; | |
| 61 | |
| 62 if (canvasY < dst.Y() || canvasY >= dst.MaxY() || src.X() >= dst.MaxX() || | |
| 63 src.MaxX() <= dst.X()) { | |
| 64 left1 = src.X(); | |
| 65 width1 = src.Width(); | |
| 66 return; | |
| 67 } | |
| 68 | |
| 69 if (src.X() < dst.X()) { | |
| 70 left1 = src.X(); | |
| 71 width1 = dst.X() - src.X(); | |
| 72 } | |
| 73 | |
| 74 if (src.MaxX() > dst.MaxX()) { | |
| 75 left2 = dst.MaxX(); | |
| 76 width2 = src.MaxX() - dst.MaxX(); | |
| 77 } | |
| 78 } | |
| 79 | |
| 80 // alphaBlendPremultiplied and alphaBlendNonPremultiplied are separate methods, | |
| 81 // even though they only differ by one line. This is done so that the compiler | |
| 82 // can inline BlendSrcOverDstPremultiplied() and BlensSrcOverDstRaw() calls. | |
| 83 // For GIF images, this optimization reduces decoding time by 15% for 3MB | |
| 84 // images. | |
| 85 void alphaBlendPremultiplied(blink::ImageFrame& src, | |
| 86 blink::ImageFrame& dst, | |
| 87 int canvasY, | |
| 88 int left, | |
| 89 int width) { | |
| 90 for (int x = 0; x < width; ++x) { | |
| 91 int canvasX = left + x; | |
| 92 blink::ImageFrame::PixelData* pixel = src.GetAddr(canvasX, canvasY); | |
| 93 if (SkGetPackedA32(*pixel) != 0xff) { | |
| 94 blink::ImageFrame::PixelData prevPixel = *dst.GetAddr(canvasX, canvasY); | |
| 95 blink::ImageFrame::BlendSrcOverDstPremultiplied(pixel, prevPixel); | |
| 96 } | |
| 97 } | |
| 98 } | |
| 99 | |
| 100 void alphaBlendNonPremultiplied(blink::ImageFrame& src, | |
| 101 blink::ImageFrame& dst, | |
| 102 int canvasY, | |
| 103 int left, | |
| 104 int width) { | |
| 105 for (int x = 0; x < width; ++x) { | |
| 106 int canvasX = left + x; | |
| 107 blink::ImageFrame::PixelData* pixel = src.GetAddr(canvasX, canvasY); | |
| 108 if (SkGetPackedA32(*pixel) != 0xff) { | |
| 109 blink::ImageFrame::PixelData prevPixel = *dst.GetAddr(canvasX, canvasY); | |
| 110 blink::ImageFrame::BlendSrcOverDstRaw(pixel, prevPixel); | |
| 111 } | |
| 112 } | |
| 113 } | |
| 114 | |
| 115 } // namespace | |
| 116 | |
| 117 namespace blink { | |
| 118 | |
// Constructs a decoder with no demuxer/decoder state; parsing starts lazily
// when data arrives (see UpdateDemuxer / DecodeSingleFrame).
WEBPImageDecoder::WEBPImageDecoder(AlphaOption alpha_option,
                                   const ColorBehavior& color_behavior,
                                   size_t max_decoded_bytes)
    : ImageDecoder(alpha_option, color_behavior, max_decoded_bytes),
      decoder_(0),
      format_flags_(0),
      frame_background_has_alpha_(false),
      demux_(0),
      demux_state_(WEBP_DEMUX_PARSING_HEADER),
      have_already_parsed_this_data_(false),
      repetition_count_(kAnimationLoopOnce),
      decoded_height_(0) {
  // Pick the row-blend helper once, so the per-row loops in
  // ApplyPostProcessing need no branch on the alpha option.
  blend_function_ = (alpha_option == kAlphaPremultiplied)
                        ? alphaBlendPremultiplied
                        : alphaBlendNonPremultiplied;
}
| 135 | |
WEBPImageDecoder::~WEBPImageDecoder() {
  // Releases the demuxer, the consolidated input data, and the incremental
  // decoder.
  Clear();
}
| 139 | |
// Releases all libwebp state and the buffered copy of the input data.
void WEBPImageDecoder::Clear() {
  WebPDemuxDelete(demux_);
  demux_ = 0;
  consolidated_data_.reset();
  ClearDecoder();
}
| 146 | |
// Releases only the incremental decoder state (demuxer and data are kept),
// so a partial frame can later be decoded again from scratch.
void WEBPImageDecoder::ClearDecoder() {
  WebPIDelete(decoder_);
  decoder_ = 0;
  decoded_height_ = 0;
  frame_background_has_alpha_ = false;
}
| 153 | |
// Called when new input data is attached; forces UpdateDemuxer() to re-parse
// on its next invocation.
void WEBPImageDecoder::OnSetData(SegmentReader*) {
  have_already_parsed_this_data_ = false;
}
| 157 | |
| 158 int WEBPImageDecoder::RepetitionCount() const { | |
| 159 return Failed() ? kAnimationLoopOnce : repetition_count_; | |
| 160 } | |
| 161 | |
| 162 bool WEBPImageDecoder::FrameIsCompleteAtIndex(size_t index) const { | |
| 163 if (!demux_ || demux_state_ <= WEBP_DEMUX_PARSING_HEADER) | |
| 164 return false; | |
| 165 if (!(format_flags_ & ANIMATION_FLAG)) | |
| 166 return ImageDecoder::FrameIsCompleteAtIndex(index); | |
| 167 bool frame_is_received_at_index = index < frame_buffer_cache_.size(); | |
| 168 return frame_is_received_at_index; | |
| 169 } | |
| 170 | |
| 171 float WEBPImageDecoder::FrameDurationAtIndex(size_t index) const { | |
| 172 return index < frame_buffer_cache_.size() | |
| 173 ? frame_buffer_cache_[index].Duration() | |
| 174 : 0; | |
| 175 } | |
| 176 | |
// Feeds all data received so far to the WebP demuxer and refreshes the cached
// image properties (canvas size, format flags, repetition count). Returns
// true once the demuxer has metadata for at least one frame; returns false
// when more data is needed, and calls SetFailed() on fatal errors.
bool WEBPImageDecoder::UpdateDemuxer() {
  if (Failed())
    return false;

  // Parsing only needs to rerun when new data arrives (see OnSetData).
  if (have_already_parsed_this_data_)
    return true;

  have_already_parsed_this_data_ = true;

  const unsigned kWebpHeaderSize = 30;
  if (data_->size() < kWebpHeaderSize)
    return false;  // Await VP8X header so WebPDemuxPartial succeeds.

  // Rebuild the demuxer from scratch over a flat copy of everything received
  // so far.
  WebPDemuxDelete(demux_);
  consolidated_data_ = data_->GetAsSkData();
  WebPData input_data = {
      reinterpret_cast<const uint8_t*>(consolidated_data_->data()),
      consolidated_data_->size()};
  demux_ = WebPDemuxPartial(&input_data, &demux_state_);
  // Failure to create a demuxer — or an unfinished parse even though all data
  // has arrived — is fatal.
  if (!demux_ || (IsAllDataReceived() && demux_state_ != WEBP_DEMUX_DONE)) {
    if (!demux_)
      consolidated_data_.reset();
    return SetFailed();
  }

  DCHECK_GT(demux_state_, WEBP_DEMUX_PARSING_HEADER);
  if (!WebPDemuxGetI(demux_, WEBP_FF_FRAME_COUNT))
    return false;  // Wait until the encoded image frame data arrives.

  // First successful parse: latch the canvas size and global properties.
  if (!IsDecodedSizeAvailable()) {
    int width = WebPDemuxGetI(demux_, WEBP_FF_CANVAS_WIDTH);
    int height = WebPDemuxGetI(demux_, WEBP_FF_CANVAS_HEIGHT);
    if (!SetSize(width, height))
      return SetFailed();

    format_flags_ = WebPDemuxGetI(demux_, WEBP_FF_FORMAT_FLAGS);
    if (!(format_flags_ & ANIMATION_FLAG)) {
      repetition_count_ = kAnimationNone;
    } else {
      // Since we have parsed at least one frame, even if partially,
      // the global animation (ANIM) properties have been read since
      // an ANIM chunk must precede the ANMF frame chunks.
      repetition_count_ = WebPDemuxGetI(demux_, WEBP_FF_LOOP_COUNT);
      // Repetition count is always <= 16 bits.
      DCHECK_EQ(repetition_count_, repetition_count_ & 0xffff);
      // A stored loop count of 0 means "loop forever".
      if (!repetition_count_)
        repetition_count_ = kAnimationLoopInfinite;
      // FIXME: Implement ICC profile support for animated images.
      format_flags_ &= ~ICCP_FLAG;
    }

    if ((format_flags_ & ICCP_FLAG) && !IgnoresColorSpace())
      ReadColorProfile();
  }

  DCHECK(IsDecodedSizeAvailable());

  size_t frame_count = WebPDemuxGetI(demux_, WEBP_FF_FRAME_COUNT);
  UpdateAggressivePurging(frame_count);

  return true;
}
| 239 | |
// Called after the frame buffer at |frame_index| has been initialized;
// records whether the canvas beneath this frame may contain transparency,
// which feeds into the frame's final alpha flag in DecodeSingleFrame.
void WEBPImageDecoder::OnInitFrameBuffer(size_t frame_index) {
  // ImageDecoder::InitFrameBuffer does a DCHECK if |frame_index| exists.
  ImageFrame& buffer = frame_buffer_cache_[frame_index];

  const size_t required_previous_frame_index =
      buffer.RequiredPreviousFrameIndex();
  if (required_previous_frame_index == kNotFound) {
    // Drawn onto an empty canvas: the background shows through (transparent)
    // wherever the frame rect does not cover the full canvas.
    frame_background_has_alpha_ =
        !buffer.OriginalFrameRect().Contains(IntRect(IntPoint(), Size()));
  } else {
    // Otherwise the background is the required previous frame's canvas: it
    // can carry alpha directly, or via a dispose-to-background-color step.
    const ImageFrame& prev_buffer =
        frame_buffer_cache_[required_previous_frame_index];
    frame_background_has_alpha_ =
        prev_buffer.HasAlpha() || (prev_buffer.GetDisposalMethod() ==
                                   ImageFrame::kDisposeOverwriteBgcolor);
  }

  // The buffer is transparent outside the decoded area while the image is
  // loading. The correct alpha value for the frame will be set when it is fully
  // decoded.
  buffer.SetHasAlpha(true);
}
| 262 | |
| 263 bool WEBPImageDecoder::CanReusePreviousFrameBuffer(size_t frame_index) const { | |
| 264 DCHECK(frame_index < frame_buffer_cache_.size()); | |
| 265 return frame_buffer_cache_[frame_index].GetAlphaBlendSource() != | |
| 266 ImageFrame::kBlendAtopPreviousFrame; | |
| 267 } | |
| 268 | |
// Frees frame |frame_index|'s pixels; additionally drops the incremental
// decoder if that frame was mid-decode, so it can be restarted later.
void WEBPImageDecoder::ClearFrameBuffer(size_t frame_index) {
  if (demux_ && demux_state_ >= WEBP_DEMUX_PARSED_HEADER &&
      frame_buffer_cache_[frame_index].GetStatus() ==
          ImageFrame::kFramePartial) {
    // Clear the decoder state so that this partial frame can be decoded again
    // when requested.
    ClearDecoder();
  }
  ImageDecoder::ClearFrameBuffer(frame_index);
}
| 279 | |
| 280 void WEBPImageDecoder::ReadColorProfile() { | |
| 281 WebPChunkIterator chunk_iterator; | |
| 282 if (!WebPDemuxGetChunk(demux_, "ICCP", 1, &chunk_iterator)) { | |
| 283 WebPDemuxReleaseChunkIterator(&chunk_iterator); | |
| 284 return; | |
| 285 } | |
| 286 | |
| 287 const char* profile_data = | |
| 288 reinterpret_cast<const char*>(chunk_iterator.chunk.bytes); | |
| 289 size_t profile_size = chunk_iterator.chunk.size; | |
| 290 | |
| 291 SetEmbeddedColorProfile(profile_data, profile_size); | |
| 292 | |
| 293 WebPDemuxReleaseChunkIterator(&chunk_iterator); | |
| 294 } | |
| 295 | |
// Post-processes the rows libwebp has decoded since the previous call (rows
// [decoded_height_, decoded_height) of the frame rect): applies the color
// transform, blends against the required previous frame where the blend mode
// demands it, and marks the pixels as changed.
void WEBPImageDecoder::ApplyPostProcessing(size_t frame_index) {
  ImageFrame& buffer = frame_buffer_cache_[frame_index];
  int width;
  int decoded_height;
  if (!WebPIDecGetRGB(decoder_, &decoded_height, &width, 0, 0))
    return;  // See also https://bugs.webkit.org/show_bug.cgi?id=74062
  if (decoded_height <= 0)
    return;

  const IntRect& frame_rect = buffer.OriginalFrameRect();
  SECURITY_DCHECK(width == frame_rect.Width());
  SECURITY_DCHECK(decoded_height <= frame_rect.Height());
  const int left = frame_rect.X();
  const int top = frame_rect.Y();

  // TODO (msarett):
  // Here we apply the color space transformation to the dst space.
  // It does not really make sense to transform to a gamma-encoded
  // space and then immediately after, perform a linear premultiply
  // and linear blending. Can we find a way to perform the
  // premultiplication and blending in a linear space?
  SkColorSpaceXform* xform = ColorTransform();
  if (xform) {
    const SkColorSpaceXform::ColorFormat kSrcFormat =
        SkColorSpaceXform::kBGRA_8888_ColorFormat;
    const SkColorSpaceXform::ColorFormat kDstFormat =
        SkColorSpaceXform::kRGBA_8888_ColorFormat;
    // Only the newly decoded rows need transforming.
    for (int y = decoded_height_; y < decoded_height; ++y) {
      const int canvas_y = top + y;
      uint8_t* row = reinterpret_cast<uint8_t*>(buffer.GetAddr(left, canvas_y));
      // In-place transform; output is unpremultiplied.
      xform->apply(kDstFormat, row, kSrcFormat, row, width,
                   kUnpremul_SkAlphaType);

      // Push the transformed, unpremultiplied values back through SetRGBA.
      uint8_t* pixel = row;
      for (int x = 0; x < width; ++x, pixel += 4) {
        const int canvas_x = left + x;
        buffer.SetRGBA(canvas_x, canvas_y, pixel[0], pixel[1], pixel[2],
                       pixel[3]);
      }
    }
  }

  // During the decoding of the current frame, we may have set some pixels to be
  // transparent (i.e. alpha < 255). If the alpha blend source was
  // 'BlendAtopPreviousFrame', the values of these pixels should be determined
  // by blending them against the pixels of the corresponding previous frame.
  // Compute the correct opaque values now.
  // FIXME: This could be avoided if libwebp decoder had an API that used the
  // previous required frame to do the alpha-blending by itself.
  if ((format_flags_ & ANIMATION_FLAG) && frame_index &&
      buffer.GetAlphaBlendSource() == ImageFrame::kBlendAtopPreviousFrame &&
      buffer.RequiredPreviousFrameIndex() != kNotFound) {
    ImageFrame& prev_buffer = frame_buffer_cache_[frame_index - 1];
    DCHECK_EQ(prev_buffer.GetStatus(), ImageFrame::kFrameComplete);
    ImageFrame::DisposalMethod prev_disposal_method =
        prev_buffer.GetDisposalMethod();
    if (prev_disposal_method == ImageFrame::kDisposeKeep) {
      // Blend transparent pixels with pixels in previous canvas.
      for (int y = decoded_height_; y < decoded_height; ++y) {
        blend_function_(buffer, prev_buffer, top + y, left, width);
      }
    } else if (prev_disposal_method == ImageFrame::kDisposeOverwriteBgcolor) {
      const IntRect& prev_rect = prev_buffer.OriginalFrameRect();
      // We need to blend a transparent pixel with the starting value (from just
      // after the InitFrame() call). If the pixel belongs to prev_rect, the
      // starting value was fully transparent, so this is a no-op. Otherwise, we
      // need to blend against the pixel from the previous canvas.
      for (int y = decoded_height_; y < decoded_height; ++y) {
        int canvas_y = top + y;
        int left1, width1, left2, width2;
        findBlendRangeAtRow(frame_rect, prev_rect, canvas_y, left1, width1,
                            left2, width2);
        if (width1 > 0)
          blend_function_(buffer, prev_buffer, canvas_y, left1, width1);
        if (width2 > 0)
          blend_function_(buffer, prev_buffer, canvas_y, left2, width2);
      }
    }
  }

  // Remember how far post-processing has advanced so the next call resumes
  // from here.
  decoded_height_ = decoded_height;
  buffer.SetPixelsChanged(true);
}
| 379 | |
| 380 size_t WEBPImageDecoder::DecodeFrameCount() { | |
| 381 // If UpdateDemuxer() fails, return the existing number of frames. This way | |
| 382 // if we get halfway through the image before decoding fails, we won't | |
| 383 // suddenly start reporting that the image has zero frames. | |
| 384 return UpdateDemuxer() ? WebPDemuxGetI(demux_, WEBP_FF_FRAME_COUNT) | |
| 385 : frame_buffer_cache_.size(); | |
| 386 } | |
| 387 | |
// Copies per-frame metadata (frame rect, duration, dispose and blend methods,
// required previous frame) from the demuxer into frame_buffer_cache_[index].
// Still (non-animated) images keep the frame's defaults.
void WEBPImageDecoder::InitializeNewFrame(size_t index) {
  if (!(format_flags_ & ANIMATION_FLAG)) {
    DCHECK(!index);
    return;
  }
  WebPIterator animated_frame;
  // libwebp demux frame numbers are 1-based.
  WebPDemuxGetFrame(demux_, index + 1, &animated_frame);
  DCHECK_EQ(animated_frame.complete, 1);
  ImageFrame* buffer = &frame_buffer_cache_[index];
  IntRect frame_rect(animated_frame.x_offset, animated_frame.y_offset,
                     animated_frame.width, animated_frame.height);
  // Clip the frame rect to the canvas bounds.
  buffer->SetOriginalFrameRect(
      Intersection(frame_rect, IntRect(IntPoint(), Size())));
  buffer->SetDuration(animated_frame.duration);
  buffer->SetDisposalMethod(animated_frame.dispose_method ==
                                    WEBP_MUX_DISPOSE_BACKGROUND
                                ? ImageFrame::kDisposeOverwriteBgcolor
                                : ImageFrame::kDisposeKeep);
  buffer->SetAlphaBlendSource(animated_frame.blend_method == WEBP_MUX_BLEND
                                  ? ImageFrame::kBlendAtopPreviousFrame
                                  : ImageFrame::kBlendAtopBgcolor);
  buffer->SetRequiredPreviousFrameIndex(
      FindRequiredPreviousFrame(index, !animated_frame.has_alpha));
  WebPDemuxReleaseIterator(&animated_frame);
}
| 413 | |
// Decodes frame |index|, first decoding any frames it depends on. Stops early
// (without failing) when more data is needed for an intermediate frame.
void WEBPImageDecoder::Decode(size_t index) {
  if (Failed())
    return;

  Vector<size_t> frames_to_decode = FindFramesToDecode(index);

  DCHECK(demux_);
  // Iterate in reverse so dependencies are decoded before dependents
  // (presumably FindFramesToDecode lists |index| first — as in the other
  // ImageDecoder subclasses; confirm against the base class).
  for (auto i = frames_to_decode.rbegin(); i != frames_to_decode.rend(); ++i) {
    if ((format_flags_ & ANIMATION_FLAG) && !InitFrameBuffer(*i)) {
      SetFailed();
      return;
    }

    WebPIterator webp_frame;
    // libwebp demux frame numbers are 1-based.
    if (!WebPDemuxGetFrame(demux_, *i + 1, &webp_frame)) {
      SetFailed();
    } else {
      DecodeSingleFrame(webp_frame.fragment.bytes, webp_frame.fragment.size,
                        *i);
      WebPDemuxReleaseIterator(&webp_frame);
    }
    if (Failed())
      return;

    // If this returns false, we need more data to continue decoding.
    if (!PostDecodeProcessing(*i))
      break;
  }

  // It is also a fatal error if all data is received and we have decoded all
  // frames available but the file is truncated.
  if (index >= frame_buffer_cache_.size() - 1 && IsAllDataReceived() &&
      demux_ && demux_state_ != WEBP_DEMUX_DONE)
    SetFailed();
}
| 449 | |
// Feeds |data_bytes| (the encoded payload of frame |frame_index|) to the
// incremental libwebp decoder, creating the pixel buffer and decoder on first
// use. Returns true when the frame reaches kFrameComplete; false when more
// data is needed or on failure (in which case Failed() is set).
bool WEBPImageDecoder::DecodeSingleFrame(const uint8_t* data_bytes,
                                         size_t data_size,
                                         size_t frame_index) {
  if (Failed())
    return false;

  DCHECK(IsDecodedSizeAvailable());

  DCHECK_GT(frame_buffer_cache_.size(), frame_index);
  ImageFrame& buffer = frame_buffer_cache_[frame_index];
  DCHECK_NE(buffer.GetStatus(), ImageFrame::kFrameComplete);

  if (buffer.GetStatus() == ImageFrame::kFrameEmpty) {
    // First call for this frame: allocate a full-canvas, zero-filled buffer.
    if (!buffer.AllocatePixelData(Size().Width(), Size().Height(),
                                  ColorSpaceForSkImages()))
      return SetFailed();
    buffer.ZeroFillPixelData();
    buffer.SetStatus(ImageFrame::kFramePartial);
    // The buffer is transparent outside the decoded area while the image is
    // loading. The correct alpha value for the frame will be set when it is
    // fully decoded.
    buffer.SetHasAlpha(true);
    buffer.SetOriginalFrameRect(IntRect(IntPoint(), Size()));
  }

  const IntRect& frame_rect = buffer.OriginalFrameRect();
  if (!decoder_) {
    WEBP_CSP_MODE mode = outputMode(format_flags_ & ALPHA_FLAG);
    if (!premultiply_alpha_)
      mode = outputMode(false);
    if (ColorTransform()) {
      // Swizzling between RGBA and BGRA is zero cost in a color transform.
      // So when we have a color transform, we should decode to whatever is
      // easiest for libwebp, and then let the color transform swizzle if
      // necessary.
      // Lossy webp is encoded as YUV (so RGBA and BGRA are the same cost).
      // Lossless webp is encoded as BGRA. This means decoding to BGRA is
      // either faster or the same cost as RGBA.
      mode = MODE_BGRA;
    }
    // Let libwebp write straight into our frame buffer (external memory), so
    // no extra copy is needed.
    WebPInitDecBuffer(&decoder_buffer_);
    decoder_buffer_.colorspace = mode;
    decoder_buffer_.u.RGBA.stride =
        Size().Width() * sizeof(ImageFrame::PixelData);
    decoder_buffer_.u.RGBA.size =
        decoder_buffer_.u.RGBA.stride * frame_rect.Height();
    decoder_buffer_.is_external_memory = 1;
    decoder_ = WebPINewDecoder(&decoder_buffer_);
    if (!decoder_)
      return SetFailed();
  }

  // Re-point the output at the frame's pixels on every call.
  decoder_buffer_.u.RGBA.rgba = reinterpret_cast<uint8_t*>(
      buffer.GetAddr(frame_rect.X(), frame_rect.Y()));

  switch (WebPIUpdate(decoder_, data_bytes, data_size)) {
    case VP8_STATUS_OK:
      // Frame fully decoded: blend/transform new rows, fix up the alpha flag,
      // then release the incremental decoder.
      ApplyPostProcessing(frame_index);
      buffer.SetHasAlpha((format_flags_ & ALPHA_FLAG) ||
                         frame_background_has_alpha_);
      buffer.SetStatus(ImageFrame::kFrameComplete);
      ClearDecoder();
      return true;
    case VP8_STATUS_SUSPENDED:
      // The decoder wants more input; only acceptable while data can still
      // arrive, otherwise fall through and fail.
      if (!IsAllDataReceived() && !FrameIsCompleteAtIndex(frame_index)) {
        ApplyPostProcessing(frame_index);
        return false;
      }
    // FALLTHROUGH
    default:
      Clear();
      return SetFailed();
  }
}
| 524 | |
| 525 } // namespace blink | |
| OLD | NEW |