| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2010 Google Inc. All rights reserved. | 2 * Copyright (C) 2010 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
| 6 * are met: | 6 * are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright | 8 * 1. Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright | 10 * 2. Redistributions in binary form must reproduce the above copyright |
| (...skipping 10 matching lines...) |
| 21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | 21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES |
| 22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | 22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; |
| 23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND | 23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND |
| 24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF | 25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
| 26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 */ | 27 */ |
| 28 | 28 |
| 29 #include "platform/image-decoders/webp/WEBPImageDecoder.h" | 29 #include "platform/image-decoders/webp/WEBPImageDecoder.h" |
| 30 | 30 |
| 31 #include "build/build_config.h" | 31 #include <limits> |
| 32 #include "third_party/skia/include/core/SkData.h" | 32 #include "platform/image-decoders/SegmentStream.h" |
| 33 | 33 #include "platform/wtf/NotFound.h" |
| 34 #if defined(ARCH_CPU_BIG_ENDIAN) | 34 #include "platform/wtf/PtrUtil.h" |
| 35 #error Blink assumes a little-endian target. | 35 #include "third_party/skia/include/core/SkImageInfo.h" |
| 36 #endif | |
| 37 | |
| 38 #if SK_B32_SHIFT // Output little-endian RGBA pixels (Android). | |
| 39 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { | |
| 40 return hasAlpha ? MODE_rgbA : MODE_RGBA; | |
| 41 } | |
| 42 #else // Output little-endian BGRA pixels. | |
| 43 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { | |
| 44 return hasAlpha ? MODE_bgrA : MODE_BGRA; | |
| 45 } | |
| 46 #endif | |
| 47 | |
| 48 namespace { | |
| 49 | |
| 50 // Returns two point ranges (<left, width> pairs) at row |canvasY| which belong | |
| 51 // to |src| but not |dst|. A range is empty if its width is 0. | |
| 52 inline void findBlendRangeAtRow(const blink::IntRect& src, | |
| 53 const blink::IntRect& dst, | |
| 54 int canvasY, | |
| 55 int& left1, | |
| 56 int& width1, | |
| 57 int& left2, | |
| 58 int& width2) { | |
| 59 SECURITY_DCHECK(canvasY >= src.Y() && canvasY < src.MaxY()); | |
| 60 left1 = -1; | |
| 61 width1 = 0; | |
| 62 left2 = -1; | |
| 63 width2 = 0; | |
| 64 | |
| 65 if (canvasY < dst.Y() || canvasY >= dst.MaxY() || src.X() >= dst.MaxX() || | |
| 66 src.MaxX() <= dst.X()) { | |
| 67 left1 = src.X(); | |
| 68 width1 = src.Width(); | |
| 69 return; | |
| 70 } | |
| 71 | |
| 72 if (src.X() < dst.X()) { | |
| 73 left1 = src.X(); | |
| 74 width1 = dst.X() - src.X(); | |
| 75 } | |
| 76 | |
| 77 if (src.MaxX() > dst.MaxX()) { | |
| 78 left2 = dst.MaxX(); | |
| 79 width2 = src.MaxX() - dst.MaxX(); | |
| 80 } | |
| 81 } | |
| 82 | |
| 83 // alphaBlendPremultiplied and alphaBlendNonPremultiplied are separate methods, | |
| 84 // even though they only differ by one line. This is done so that the compiler | |
| 85 // can inline BlendSrcOverDstPremultiplied() and BlendSrcOverDstRaw() calls. |
| 86 // For GIF images, this optimization reduces decoding time by 15% for 3MB | |
| 87 // images. | |
| 88 void alphaBlendPremultiplied(blink::ImageFrame& src, | |
| 89 blink::ImageFrame& dst, | |
| 90 int canvasY, | |
| 91 int left, | |
| 92 int width) { | |
| 93 for (int x = 0; x < width; ++x) { | |
| 94 int canvasX = left + x; | |
| 95 blink::ImageFrame::PixelData* pixel = src.GetAddr(canvasX, canvasY); | |
| 96 if (SkGetPackedA32(*pixel) != 0xff) { | |
| 97 blink::ImageFrame::PixelData prevPixel = *dst.GetAddr(canvasX, canvasY); | |
| 98 blink::ImageFrame::BlendSrcOverDstPremultiplied(pixel, prevPixel); | |
| 99 } | |
| 100 } | |
| 101 } | |
| 102 | |
| 103 void alphaBlendNonPremultiplied(blink::ImageFrame& src, | |
| 104 blink::ImageFrame& dst, | |
| 105 int canvasY, | |
| 106 int left, | |
| 107 int width) { | |
| 108 for (int x = 0; x < width; ++x) { | |
| 109 int canvasX = left + x; | |
| 110 blink::ImageFrame::PixelData* pixel = src.GetAddr(canvasX, canvasY); | |
| 111 if (SkGetPackedA32(*pixel) != 0xff) { | |
| 112 blink::ImageFrame::PixelData prevPixel = *dst.GetAddr(canvasX, canvasY); | |
| 113 blink::ImageFrame::BlendSrcOverDstRaw(pixel, prevPixel); | |
| 114 } | |
| 115 } | |
| 116 } | |
| 117 | |
| 118 } // namespace | |
| 119 | 36 |
| 120 namespace blink { | 37 namespace blink { |
| 121 | 38 |
| 122 WEBPImageDecoder::WEBPImageDecoder(AlphaOption alpha_option, | 39 WEBPImageDecoder::WEBPImageDecoder(AlphaOption alpha_option, |
| 123 const ColorBehavior& color_behavior, | 40 const ColorBehavior& color_behavior, |
| 124 size_t max_decoded_bytes) | 41 size_t max_decoded_bytes) |
| 125 : ImageDecoder(alpha_option, color_behavior, max_decoded_bytes), | 42 : ImageDecoder(alpha_option, color_behavior, max_decoded_bytes), |
| 126 decoder_(0), | 43 codec_(), |
| 127 format_flags_(0), | 44 segment_stream_(nullptr) {} |
| 128 frame_background_has_alpha_(false), | 45 |
| 129 demux_(0), | 46 WEBPImageDecoder::~WEBPImageDecoder() = default; |
| 130 demux_state_(WEBP_DEMUX_PARSING_HEADER), | 47 |
| 131 have_already_parsed_this_data_(false), | 48 void WEBPImageDecoder::OnSetData(SegmentReader* data) { |
| 132 repetition_count_(kAnimationLoopOnce), | 49 if (!data) { |
| 133 decoded_height_(0) { | 50 if (segment_stream_) |
| 134 blend_function_ = (alpha_option == kAlphaPremultiplied) | 51 segment_stream_->SetReader(nullptr); |
| 135 ? alphaBlendPremultiplied | 52 return; |
| 136 : alphaBlendNonPremultiplied; | 53 } |
| 137 } | 54 |
| 138 | 55 std::unique_ptr<SegmentStream> segment_stream; |
| 139 WEBPImageDecoder::~WEBPImageDecoder() { | 56 if (!segment_stream_) { |
| 140 Clear(); | 57 segment_stream = base::MakeUnique<SegmentStream>(); |
| 141 } | 58 segment_stream_ = segment_stream.get(); |
| 142 | 59 } |
| 143 void WEBPImageDecoder::Clear() { | 60 |
| 144 WebPDemuxDelete(demux_); | 61 segment_stream_->SetReader(std::move(data)); |
| 145 demux_ = 0; | 62 |
| 146 consolidated_data_.reset(); | 63 if (!codec_) { |
| 147 ClearDecoder(); | 64 SkCodec::Result codec_creation_result; |
| 148 } | 65 codec_ = SkCodec::MakeFromStream(std::move(segment_stream), |
| 149 | 66 &codec_creation_result, nullptr); |
| 150 void WEBPImageDecoder::ClearDecoder() { | 67 switch (codec_creation_result) { |
| 151 WebPIDelete(decoder_); | 68 case SkCodec::kSuccess: { |
| 152 decoder_ = 0; | 69 // SkCodec::MakeFromStream will read enough of the image to get the |
| 153 decoded_height_ = 0; | 70 // image size. |
| 154 frame_background_has_alpha_ = false; | 71 SkImageInfo image_info = codec_->getInfo(); |
| 155 } | 72 SetSize(image_info.width(), image_info.height()); |
| 156 | 73 return; |
| 157 void WEBPImageDecoder::OnSetData(SegmentReader*) { | 74 } |
| 158 have_already_parsed_this_data_ = false; | 75 case SkCodec::kIncompleteInput: |
| 76 // |segment_stream_|'s ownership was passed into MakeFromStream, which |
| 77 // deletes the stream when codec creation fails. |
| 78 // Set |segment_stream_| to null so we are not left pointing at |
| 79 // reclaimed memory. |
| 80 segment_stream_ = nullptr; |
| 81 return; |
| 82 default: |
| 83 SetFailed(); |
| 84 return; |
| 85 } |
| 86 } |
| 159 } | 87 } |
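
For readers new to the SkCodec entry point used in OnSetData() above, here is a minimal, standalone sketch of the same creation pattern. It is an illustration only: it assumes a plain SkMemoryStream and a hypothetical MakeWebpCodec() helper rather than Blink's SegmentStream, and it simply gives up when creation does not succeed instead of waiting for more data.

```cpp
#include <memory>
#include <utility>

#include "third_party/skia/include/codec/SkCodec.h"
#include "third_party/skia/include/core/SkData.h"
#include "third_party/skia/include/core/SkStream.h"

// Hypothetical helper: create an SkCodec for a fully buffered WebP payload.
std::unique_ptr<SkCodec> MakeWebpCodec(sk_sp<SkData> webp_bytes,
                                       SkISize* canvas_size) {
  SkCodec::Result result;
  // MakeFromStream takes ownership of the stream; if creation fails, the
  // stream is destroyed and nullptr is returned.
  std::unique_ptr<SkCodec> codec = SkCodec::MakeFromStream(
      SkMemoryStream::Make(std::move(webp_bytes)), &result, nullptr);
  if (!codec || result != SkCodec::kSuccess)
    return nullptr;
  // Enough of the header has been parsed at this point to know the canvas
  // size, which is what OnSetData() forwards to SetSize().
  *canvas_size = codec->getInfo().dimensions();
  return codec;
}
```
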
| 160 | 88 |
| 161 int WEBPImageDecoder::RepetitionCount() const { | 89 int WEBPImageDecoder::RepetitionCount() const { |
| 162 return Failed() ? kAnimationLoopOnce : repetition_count_; | 90 if (!codec_ || segment_stream_->IsCleared()) |
| 91 return repetition_count_; |
| 92 |
| 93 DCHECK(!Failed()); |
| 94 |
| 95 // SkCodec will parse forward in the file if the repetition count has not |
| 96 // been seen yet. |
| 97 int repetition_count = codec_->getRepetitionCount(); |
| 98 |
| 99 switch (repetition_count) { |
| 100 case 0: { |
| 101 // SkCodec returns 0 for both still images and animated images which |
| 102 // only play once. |
| 103 if (IsAllDataReceived() && codec_->getFrameCount() == 1) { |
| 104 repetition_count_ = kAnimationNone; |
| 105 break; |
| 106 } |
| 107 |
| 108 repetition_count_ = kAnimationLoopOnce; |
| 109 break; |
| 110 } |
| 111 case SkCodec::kRepetitionCountInfinite: |
| 112 repetition_count_ = kAnimationLoopInfinite; |
| 113 break; |
| 114 default: |
| 115 repetition_count_ = repetition_count; |
| 116 break; |
| 117 } |
| 118 |
| 119 return repetition_count_; |
| 163 } | 120 } |
| 164 | 121 |
| 165 bool WEBPImageDecoder::FrameIsReceivedAtIndex(size_t index) const { | 122 bool WEBPImageDecoder::FrameIsReceivedAtIndex(size_t index) const { |
| 166 if (!demux_ || demux_state_ <= WEBP_DEMUX_PARSING_HEADER) | 123 SkCodec::FrameInfo frame_info; |
| 124 if (!codec_ || !codec_->getFrameInfo(index, &frame_info)) |
| 167 return false; | 125 return false; |
| 168 if (!(format_flags_ & ANIMATION_FLAG)) | 126 return frame_info.fFullyReceived; |
| 169 return ImageDecoder::FrameIsReceivedAtIndex(index); | |
| 170 bool frame_is_received_at_index = index < frame_buffer_cache_.size(); | |
| 171 return frame_is_received_at_index; | |
| 172 } | 127 } |
| 173 | 128 |
| 174 float WEBPImageDecoder::FrameDurationAtIndex(size_t index) const { | 129 float WEBPImageDecoder::FrameDurationAtIndex(size_t index) const { |
| 175 return index < frame_buffer_cache_.size() | 130 if (index < frame_buffer_cache_.size()) |
| 176 ? frame_buffer_cache_[index].Duration() | 131 return frame_buffer_cache_[index].Duration(); |
| 177 : 0; | 132 return 0; |
| 178 } | 133 } |
| 179 | 134 |
| 180 bool WEBPImageDecoder::UpdateDemuxer() { | 135 bool WEBPImageDecoder::SetFailed() { |
| 181 if (Failed()) | 136 segment_stream_ = nullptr; |
| 182 return false; | 137 codec_.reset(); |
| 183 | 138 return ImageDecoder::SetFailed(); |
| 184 if (have_already_parsed_this_data_) | 139 } |
| 185 return true; | 140 |
| 186 | 141 size_t WEBPImageDecoder::ClearCacheExceptFrame(size_t index) { |
| 187 have_already_parsed_this_data_ = true; | 142 // SkCodec attempts to report the earliest possible required frame, but it is |
| 188 | 143 // possible that frame has been evicted, while a later frame (which could also |
| 189 const unsigned kWebpHeaderSize = 30; | 144 // be used as the required frame) is still cached. Try to preserve a frame |
| 190 if (data_->size() < kWebpHeaderSize) | 145 // that is still cached. |
| 191 return false; // Await VP8X header so WebPDemuxPartial succeeds. | 146 if (frame_buffer_cache_.size() <= 1) |
| 192 | 147 return 0; |
| 193 WebPDemuxDelete(demux_); | 148 |
| 194 consolidated_data_ = data_->GetAsSkData(); | 149 size_t index2 = kNotFound; |
| 195 WebPData input_data = { | 150 if (index < frame_buffer_cache_.size()) { |
| 196 reinterpret_cast<const uint8_t*>(consolidated_data_->data()), | 151 const ImageFrame& frame = frame_buffer_cache_[index]; |
| 197 consolidated_data_->size()}; | 152 if (frame.RequiredPreviousFrameIndex() != kNotFound && |
| 198 demux_ = WebPDemuxPartial(&input_data, &demux_state_); | 153 !FrameStatusSufficientForSuccessors(index)) |
| 199 if (!demux_ || (IsAllDataReceived() && demux_state_ != WEBP_DEMUX_DONE)) { | 154 index2 = GetViableReferenceFrameIndex(index); |
| 200 if (!demux_) | 155 } |
| 201 consolidated_data_.reset(); | 156 |
| 202 return SetFailed(); | 157 return ClearCacheExceptTwoFrames(index, index2); |
| 203 } | 158 } |
| 204 | 159 |
| 205 DCHECK_GT(demux_state_, WEBP_DEMUX_PARSING_HEADER); | 160 size_t WEBPImageDecoder::DecodeFrameCount() { |
| 206 if (!WebPDemuxGetI(demux_, WEBP_FF_FRAME_COUNT)) | 161 if (!codec_ || segment_stream_->IsCleared()) |
| 207 return false; // Wait until the encoded image frame data arrives. | 162 return frame_buffer_cache_.size(); |
| 208 | 163 |
| 209 if (!IsDecodedSizeAvailable()) { | 164 return codec_->getFrameCount(); |
| 210 int width = WebPDemuxGetI(demux_, WEBP_FF_CANVAS_WIDTH); | 165 } |
| 211 int height = WebPDemuxGetI(demux_, WEBP_FF_CANVAS_HEIGHT); | 166 |
| 212 if (!SetSize(width, height)) | 167 void WEBPImageDecoder::InitializeNewFrame(size_t index) { |
| 213 return SetFailed(); | 168 DCHECK(codec_); |
| 214 | 169 |
| 215 format_flags_ = WebPDemuxGetI(demux_, WEBP_FF_FORMAT_FLAGS); | 170 ImageFrame& frame = frame_buffer_cache_[index]; |
| 216 if (!(format_flags_ & ANIMATION_FLAG)) { | 171 // SkCodec does not inform us if only a portion of the image was updated |
| 217 repetition_count_ = kAnimationNone; | 172 // in the current frame. Because of this, rather than correctly filling in |
| 173 // the frame rect, we set the frame rect to be the image's full size. |
| 174 // The original frame rect is not used, anyway. |
| 175 IntSize full_image_size = Size(); |
| 176 frame.SetOriginalFrameRect(IntRect(IntPoint(), full_image_size)); |
| 177 |
| 178 SkCodec::FrameInfo frame_info; |
| 179 bool frame_info_received = codec_->getFrameInfo(index, &frame_info); |
| 180 DCHECK(frame_info_received); |
| 181 frame.SetDuration(frame_info.fDuration); |
| 182 size_t required_previous_frame_index; |
| 183 if (frame_info.fRequiredFrame == SkCodec::kNone) { |
| 184 required_previous_frame_index = WTF::kNotFound; |
| 185 } else { |
| 186 required_previous_frame_index = |
| 187 static_cast<size_t>(frame_info.fRequiredFrame); |
| 188 } |
| 189 frame.SetRequiredPreviousFrameIndex(required_previous_frame_index); |
| 190 |
| 191 ImageFrame::DisposalMethod disposal_method = ImageFrame::kDisposeNotSpecified; |
| 192 switch (frame_info.fDisposalMethod) { |
| 193 case SkCodecAnimation::DisposalMethod::kKeep: |
| 194 disposal_method = ImageFrame::kDisposeKeep; |
| 195 break; |
| 196 case SkCodecAnimation::DisposalMethod::kRestoreBGColor: |
| 197 disposal_method = ImageFrame::kDisposeOverwriteBgcolor; |
| 198 break; |
| 199 default: |
| 200 break; |
| 201 } |
| 202 frame.SetDisposalMethod(disposal_method); |
| 203 } |
| 204 |
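
All of the per-frame metadata consumed by InitializeNewFrame() above comes from SkCodec::getFrameInfo(). As a standalone illustration (DumpFrameInfo() below is a hypothetical helper, not Blink code), this is roughly how the duration, required frame, and received state of each frame can be inspected:

```cpp
#include <cstdio>

#include "third_party/skia/include/codec/SkCodec.h"

// Hypothetical helper: print the animation metadata SkCodec exposes per frame.
void DumpFrameInfo(SkCodec* codec) {
  // getFrameCount() may parse further into the stream to discover frames.
  const int frame_count = codec->getFrameCount();
  for (int i = 0; i < frame_count; ++i) {
    SkCodec::FrameInfo info;
    if (!codec->getFrameInfo(i, &info))
      continue;  // Metadata for this frame has not been parsed yet.
    std::printf("frame %d: %d ms, required frame %d, fully received %d\n", i,
                info.fDuration, info.fRequiredFrame,
                static_cast<int>(info.fFullyReceived));
  }
}
```
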
| 205 void WEBPImageDecoder::Decode(size_t index) { |
| 206 if (!codec_ || segment_stream_->IsCleared()) |
| 207 return; |
| 208 |
| 209 DCHECK(!Failed()); |
| 210 |
| 211 DCHECK_LT(index, frame_buffer_cache_.size()); |
| 212 |
| 213 UpdateAggressivePurging(index); |
| 214 SkImageInfo image_info = codec_->getInfo() |
| 215 .makeColorType(kN32_SkColorType) |
| 216 .makeColorSpace(ColorSpaceForSkImages()); |
| 217 |
| 218 SkCodec::Options options; |
| 219 options.fFrameIndex = index; |
| 220 options.fPriorFrame = SkCodec::kNone; |
| 221 options.fZeroInitialized = SkCodec::kNo_ZeroInitialized; |
| 222 |
| 223 ImageFrame& frame = frame_buffer_cache_[index]; |
| 224 if (frame.GetStatus() == ImageFrame::kFrameEmpty) { |
| 225 size_t required_previous_frame_index = frame.RequiredPreviousFrameIndex(); |
| 226 if (required_previous_frame_index == kNotFound) { |
| 227 frame.AllocatePixelData(Size().Width(), Size().Height(), |
| 228 ColorSpaceForSkImages()); |
| 229 frame.ZeroFillPixelData(); |
| 218 } else { | 230 } else { |
| 219 // Since we have parsed at least one frame, even if partially, | 231 size_t previous_frame_index = GetViableReferenceFrameIndex(index); |
| 220 // the global animation (ANIM) properties have been read since | 232 if (previous_frame_index == kNotFound) { |
| 221 // an ANIM chunk must precede the ANMF frame chunks. | 233 previous_frame_index = required_previous_frame_index; |
| 222 repetition_count_ = WebPDemuxGetI(demux_, WEBP_FF_LOOP_COUNT); | 234 Decode(previous_frame_index); |
| 223 // Repetition count is always <= 16 bits. | 235 } |
| 224 DCHECK_EQ(repetition_count_, repetition_count_ & 0xffff); | 236 |
| 225 if (!repetition_count_) | 237 // We try to reuse |previous_frame| as the starting state to avoid copying. |
| 226 repetition_count_ = kAnimationLoopInfinite; | 238 // If CanReusePreviousFrameBuffer returns false, we must copy the data, |
| 227 // FIXME: Implement ICC profile support for animated images. | 239 // since |previous_frame| is necessary to decode this frame or later |
| 228 format_flags_ &= ~ICCP_FLAG; | 240 // frames. |
| 229 } | 241 ImageFrame& previous_frame = frame_buffer_cache_[previous_frame_index]; |
| 230 | 242 if (!frame.TakeBitmapDataIfWritable(&previous_frame) && |
| 231 if ((format_flags_ & ICCP_FLAG) && !IgnoresColorSpace()) | 243 !frame.CopyBitmapData(previous_frame)) { |
| 232 ReadColorProfile(); | 244 SetFailed(); |
| 233 } | 245 return; |
| 234 | 246 } |
| 235 DCHECK(IsDecodedSizeAvailable()); | 247 options.fPriorFrame = previous_frame_index; |
| 236 | 248 } |
| 237 size_t frame_count = WebPDemuxGetI(demux_, WEBP_FF_FRAME_COUNT); | 249 } |
| 238 UpdateAggressivePurging(frame_count); | 250 |
| 239 | 251 if (frame.GetStatus() == ImageFrame::kFrameAllocated) { |
| 240 return true; | 252 SkCodec::Result start_incremental_decode_result = |
| 241 } | 253 codec_->startIncrementalDecode(image_info, frame.Bitmap().getPixels(), |
| 242 | 254 frame.Bitmap().rowBytes(), &options); |
| 243 void WEBPImageDecoder::OnInitFrameBuffer(size_t frame_index) { | 255 switch (start_incremental_decode_result) { |
| 244 // ImageDecoder::InitFrameBuffer does a DCHECK if |frame_index| exists. | 256 case SkCodec::kSuccess: |
| 245 ImageFrame& buffer = frame_buffer_cache_[frame_index]; | 257 break; |
| 246 | 258 case SkCodec::kIncompleteInput: |
| 247 const size_t required_previous_frame_index = | 259 return; |
| 248 buffer.RequiredPreviousFrameIndex(); | 260 default: |
| 249 if (required_previous_frame_index == kNotFound) { | 261 SetFailed(); |
| 250 frame_background_has_alpha_ = | 262 return; |
| 251 !buffer.OriginalFrameRect().Contains(IntRect(IntPoint(), Size())); | 263 } |
| 252 } else { | 264 frame.SetStatus(ImageFrame::kFramePartial); |
| 253 const ImageFrame& prev_buffer = | 265 } |
| 254 frame_buffer_cache_[required_previous_frame_index]; | 266 |
| 255 frame_background_has_alpha_ = | 267 SkCodec::Result incremental_decode_result = codec_->incrementalDecode(); |
| 256 prev_buffer.HasAlpha() || (prev_buffer.GetDisposalMethod() == | 268 switch (incremental_decode_result) { |
| 257 ImageFrame::kDisposeOverwriteBgcolor); | 269 case SkCodec::kSuccess: { |
| 258 } | 270 SkCodec::FrameInfo frame_info; |
| 259 | 271 bool frame_info_received = codec_->getFrameInfo(index, &frame_info); |
| 260 // The buffer is transparent outside the decoded area while the image is | 272 DCHECK(frame_info_received); |
| 261 // loading. The correct alpha value for the frame will be set when it is fully | 273 frame.SetHasAlpha(!SkAlphaTypeIsOpaque(frame_info.fAlphaType)); |
| 262 // decoded. | 274 frame.SetPixelsChanged(true); |
| 263 buffer.SetHasAlpha(true); | 275 frame.SetStatus(ImageFrame::kFrameComplete); |
| 264 } | 276 PostDecodeProcessing(index); |
| 265 | 277 break; |
| 266 bool WEBPImageDecoder::CanReusePreviousFrameBuffer(size_t frame_index) const { | 278 } |
| 267 DCHECK(frame_index < frame_buffer_cache_.size()); | 279 case SkCodec::kIncompleteInput: |
| 268 return frame_buffer_cache_[frame_index].GetAlphaBlendSource() != | 280 frame.SetPixelsChanged(true); |
| 269 ImageFrame::kBlendAtopPreviousFrame; | 281 if (FrameIsReceivedAtIndex(index) || IsAllDataReceived()) { |
| 270 } | 282 SetFailed(); |
| 271 | 283 } |
| 272 void WEBPImageDecoder::ClearFrameBuffer(size_t frame_index) { | 284 break; |
| 273 if (demux_ && demux_state_ >= WEBP_DEMUX_PARSED_HEADER && | 285 default: |
| 274 frame_buffer_cache_[frame_index].GetStatus() == | |
| 275 ImageFrame::kFramePartial) { | |
| 276 // Clear the decoder state so that this partial frame can be decoded again | |
| 277 // when requested. | |
| 278 ClearDecoder(); | |
| 279 } | |
| 280 ImageDecoder::ClearFrameBuffer(frame_index); | |
| 281 } | |
| 282 | |
| 283 void WEBPImageDecoder::ReadColorProfile() { | |
| 284 WebPChunkIterator chunk_iterator; | |
| 285 if (!WebPDemuxGetChunk(demux_, "ICCP", 1, &chunk_iterator)) { | |
| 286 WebPDemuxReleaseChunkIterator(&chunk_iterator); | |
| 287 return; | |
| 288 } | |
| 289 | |
| 290 const char* profile_data = | |
| 291 reinterpret_cast<const char*>(chunk_iterator.chunk.bytes); | |
| 292 size_t profile_size = chunk_iterator.chunk.size; | |
| 293 | |
| 294 SetEmbeddedColorProfile(profile_data, profile_size); | |
| 295 | |
| 296 WebPDemuxReleaseChunkIterator(&chunk_iterator); | |
| 297 } | |
| 298 | |
| 299 void WEBPImageDecoder::ApplyPostProcessing(size_t frame_index) { | |
| 300 ImageFrame& buffer = frame_buffer_cache_[frame_index]; | |
| 301 int width; | |
| 302 int decoded_height; | |
| 303 if (!WebPIDecGetRGB(decoder_, &decoded_height, &width, 0, 0)) | |
| 304 return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062 | |
| 305 if (decoded_height <= 0) | |
| 306 return; | |
| 307 | |
| 308 const IntRect& frame_rect = buffer.OriginalFrameRect(); | |
| 309 SECURITY_DCHECK(width == frame_rect.Width()); | |
| 310 SECURITY_DCHECK(decoded_height <= frame_rect.Height()); | |
| 311 const int left = frame_rect.X(); | |
| 312 const int top = frame_rect.Y(); | |
| 313 | |
| 314 // TODO (msarett): | |
| 315 // Here we apply the color space transformation to the dst space. | |
| 316 // It does not really make sense to transform to a gamma-encoded | |
| 317 // space and then immediately after, perform a linear premultiply | |
| 318 // and linear blending. Can we find a way to perform the | |
| 319 // premultiplication and blending in a linear space? | |
| 320 SkColorSpaceXform* xform = ColorTransform(); | |
| 321 if (xform) { | |
| 322 const SkColorSpaceXform::ColorFormat kSrcFormat = | |
| 323 SkColorSpaceXform::kBGRA_8888_ColorFormat; | |
| 324 const SkColorSpaceXform::ColorFormat kDstFormat = | |
| 325 SkColorSpaceXform::kRGBA_8888_ColorFormat; | |
| 326 for (int y = decoded_height_; y < decoded_height; ++y) { | |
| 327 const int canvas_y = top + y; | |
| 328 uint8_t* row = reinterpret_cast<uint8_t*>(buffer.GetAddr(left, canvas_y)); | |
| 329 xform->apply(kDstFormat, row, kSrcFormat, row, width, | |
| 330 kUnpremul_SkAlphaType); | |
| 331 | |
| 332 uint8_t* pixel = row; | |
| 333 for (int x = 0; x < width; ++x, pixel += 4) { | |
| 334 const int canvas_x = left + x; | |
| 335 buffer.SetRGBA(canvas_x, canvas_y, pixel[0], pixel[1], pixel[2], | |
| 336 pixel[3]); | |
| 337 } | |
| 338 } | |
| 339 } | |
| 340 | |
| 341 // During the decoding of the current frame, we may have set some pixels to be | |
| 342 // transparent (i.e. alpha < 255). If the alpha blend source was | |
| 343 // 'BlendAtopPreviousFrame', the values of these pixels should be determined | |
| 344 // by blending them against the pixels of the corresponding previous frame. | |
| 345 // Compute the correct opaque values now. | |
| 346 // FIXME: This could be avoided if libwebp decoder had an API that used the | |
| 347 // previous required frame to do the alpha-blending by itself. | |
| 348 if ((format_flags_ & ANIMATION_FLAG) && frame_index && | |
| 349 buffer.GetAlphaBlendSource() == ImageFrame::kBlendAtopPreviousFrame && | |
| 350 buffer.RequiredPreviousFrameIndex() != kNotFound) { | |
| 351 ImageFrame& prev_buffer = frame_buffer_cache_[frame_index - 1]; | |
| 352 DCHECK_EQ(prev_buffer.GetStatus(), ImageFrame::kFrameComplete); | |
| 353 ImageFrame::DisposalMethod prev_disposal_method = | |
| 354 prev_buffer.GetDisposalMethod(); | |
| 355 if (prev_disposal_method == ImageFrame::kDisposeKeep) { | |
| 356 // Blend transparent pixels with pixels in previous canvas. | |
| 357 for (int y = decoded_height_; y < decoded_height; ++y) { | |
| 358 blend_function_(buffer, prev_buffer, top + y, left, width); | |
| 359 } | |
| 360 } else if (prev_disposal_method == ImageFrame::kDisposeOverwriteBgcolor) { | |
| 361 const IntRect& prev_rect = prev_buffer.OriginalFrameRect(); | |
| 362 // We need to blend a transparent pixel with the starting value (from just | |
| 363 // after the InitFrame() call). If the pixel belongs to prev_rect, the | |
| 364 // starting value was fully transparent, so this is a no-op. Otherwise, we | |
| 365 // need to blend against the pixel from the previous canvas. | |
| 366 for (int y = decoded_height_; y < decoded_height; ++y) { | |
| 367 int canvas_y = top + y; | |
| 368 int left1, width1, left2, width2; | |
| 369 findBlendRangeAtRow(frame_rect, prev_rect, canvas_y, left1, width1, | |
| 370 left2, width2); | |
| 371 if (width1 > 0) | |
| 372 blend_function_(buffer, prev_buffer, canvas_y, left1, width1); | |
| 373 if (width2 > 0) | |
| 374 blend_function_(buffer, prev_buffer, canvas_y, left2, width2); | |
| 375 } | |
| 376 } | |
| 377 } | |
| 378 | |
| 379 decoded_height_ = decoded_height; | |
| 380 buffer.SetPixelsChanged(true); | |
| 381 } | |
| 382 | |
| 383 size_t WEBPImageDecoder::DecodeFrameCount() { | |
| 384 // If UpdateDemuxer() fails, return the existing number of frames. This way | |
| 385 // if we get halfway through the image before decoding fails, we won't | |
| 386 // suddenly start reporting that the image has zero frames. | |
| 387 return UpdateDemuxer() ? WebPDemuxGetI(demux_, WEBP_FF_FRAME_COUNT) | |
| 388 : frame_buffer_cache_.size(); | |
| 389 } | |
| 390 | |
| 391 void WEBPImageDecoder::InitializeNewFrame(size_t index) { | |
| 392 if (!(format_flags_ & ANIMATION_FLAG)) { | |
| 393 DCHECK(!index); | |
| 394 return; | |
| 395 } | |
| 396 WebPIterator animated_frame; | |
| 397 WebPDemuxGetFrame(demux_, index + 1, &animated_frame); | |
| 398 DCHECK_EQ(animated_frame.complete, 1); | |
| 399 ImageFrame* buffer = &frame_buffer_cache_[index]; | |
| 400 IntRect frame_rect(animated_frame.x_offset, animated_frame.y_offset, | |
| 401 animated_frame.width, animated_frame.height); | |
| 402 buffer->SetOriginalFrameRect( | |
| 403 Intersection(frame_rect, IntRect(IntPoint(), Size()))); | |
| 404 buffer->SetDuration(animated_frame.duration); | |
| 405 buffer->SetDisposalMethod(animated_frame.dispose_method == | |
| 406 WEBP_MUX_DISPOSE_BACKGROUND | |
| 407 ? ImageFrame::kDisposeOverwriteBgcolor | |
| 408 : ImageFrame::kDisposeKeep); | |
| 409 buffer->SetAlphaBlendSource(animated_frame.blend_method == WEBP_MUX_BLEND | |
| 410 ? ImageFrame::kBlendAtopPreviousFrame | |
| 411 : ImageFrame::kBlendAtopBgcolor); | |
| 412 buffer->SetRequiredPreviousFrameIndex( | |
| 413 FindRequiredPreviousFrame(index, !animated_frame.has_alpha)); | |
| 414 WebPDemuxReleaseIterator(&animated_frame); | |
| 415 } | |
| 416 | |
| 417 void WEBPImageDecoder::Decode(size_t index) { | |
| 418 if (Failed()) | |
| 419 return; | |
| 420 | |
| 421 Vector<size_t> frames_to_decode = FindFramesToDecode(index); | |
| 422 | |
| 423 DCHECK(demux_); | |
| 424 for (auto i = frames_to_decode.rbegin(); i != frames_to_decode.rend(); ++i) { | |
| 425 if ((format_flags_ & ANIMATION_FLAG) && !InitFrameBuffer(*i)) { | |
| 426 SetFailed(); | 286 SetFailed(); |
| 427 return; | 287 return; |
| 428 } | 288 } |
| 429 | 289 } |
| 430 WebPIterator webp_frame; | 290 |
| 431 if (!WebPDemuxGetFrame(demux_, *i + 1, &webp_frame)) { | 291 size_t WEBPImageDecoder::GetViableReferenceFrameIndex( |
| 432 SetFailed(); | 292 size_t dependent_index) const { |
| 433 } else { | 293 DCHECK_LT(dependent_index, frame_buffer_cache_.size()); |
| 434 DecodeSingleFrame(webp_frame.fragment.bytes, webp_frame.fragment.size, | 294 |
| 435 *i); | 295 size_t required_previous_frame_index = |
| 436 WebPDemuxReleaseIterator(&webp_frame); | 296 frame_buffer_cache_[dependent_index].RequiredPreviousFrameIndex(); |
| 437 } | 297 |
| 438 if (Failed()) | 298 // Any frame in the range [|required_previous_frame_index|, |dependent_index|) |
| 439 return; | 299 // can be provided as the prior frame to SkCodec. |
| 440 | 300 // |
| 441 // If this returns false, we need more data to continue decoding. | 301 // SkCodec sets SkCodec::FrameInfo::fRequiredFrame to the earliest frame which |
| 442 if (!PostDecodeProcessing(*i)) | 302 // can be used. This might come up when several frames update the same |
| 443 break; | 303 // subregion. If the same subregion is about to be overwritten, it doesn't |
| 444 } | 304 // matter which frame in that chain is provided. |
| 445 | 305 DCHECK_NE(required_previous_frame_index, kNotFound); |
| 446 // It is also a fatal error if all data is received and we have decoded all | 306 // Loop backwards because the frames most likely to be in cache are the most |
| 447 // frames available but the file is truncated. | 307 // recent. |
| 448 if (index >= frame_buffer_cache_.size() - 1 && IsAllDataReceived() && | 308 for (size_t i = dependent_index - 1; i != required_previous_frame_index; |
| 449 demux_ && demux_state_ != WEBP_DEMUX_DONE) | 309 i--) { |
| 450 SetFailed(); | 310 const ImageFrame& frame = frame_buffer_cache_[i]; |
| 451 } | 311 |
| 452 | 312 if (frame.GetStatus() == ImageFrame::kFrameComplete) { |
| 453 bool WEBPImageDecoder::DecodeSingleFrame(const uint8_t* data_bytes, | 313 return i; |
| 454 size_t data_size, | 314 } |
| 455 size_t frame_index) { | 315 } |
| 456 if (Failed()) | 316 |
| 457 return false; | 317 return kNotFound; |
| 458 | |
| 459 DCHECK(IsDecodedSizeAvailable()); | |
| 460 | |
| 461 DCHECK_GT(frame_buffer_cache_.size(), frame_index); | |
| 462 ImageFrame& buffer = frame_buffer_cache_[frame_index]; | |
| 463 DCHECK_NE(buffer.GetStatus(), ImageFrame::kFrameComplete); | |
| 464 | |
| 465 if (buffer.GetStatus() == ImageFrame::kFrameEmpty) { | |
| 466 if (!buffer.AllocatePixelData(Size().Width(), Size().Height(), | |
| 467 ColorSpaceForSkImages())) | |
| 468 return SetFailed(); | |
| 469 buffer.ZeroFillPixelData(); | |
| 470 buffer.SetStatus(ImageFrame::kFramePartial); | |
| 471 // The buffer is transparent outside the decoded area while the image is | |
| 472 // loading. The correct alpha value for the frame will be set when it is | |
| 473 // fully decoded. | |
| 474 buffer.SetHasAlpha(true); | |
| 475 buffer.SetOriginalFrameRect(IntRect(IntPoint(), Size())); | |
| 476 } | |
| 477 | |
| 478 const IntRect& frame_rect = buffer.OriginalFrameRect(); | |
| 479 if (!decoder_) { | |
| 480 WEBP_CSP_MODE mode = outputMode(format_flags_ & ALPHA_FLAG); | |
| 481 if (!premultiply_alpha_) | |
| 482 mode = outputMode(false); | |
| 483 if (ColorTransform()) { | |
| 484 // Swizzling between RGBA and BGRA is zero cost in a color transform. | |
| 485 // So when we have a color transform, we should decode to whatever is | |
| 486 // easiest for libwebp, and then let the color transform swizzle if | |
| 487 // necessary. | |
| 488 // Lossy webp is encoded as YUV (so RGBA and BGRA are the same cost). | |
| 489 // Lossless webp is encoded as BGRA. This means decoding to BGRA is | |
| 490 // either faster or the same cost as RGBA. | |
| 491 mode = MODE_BGRA; | |
| 492 } | |
| 493 WebPInitDecBuffer(&decoder_buffer_); | |
| 494 decoder_buffer_.colorspace = mode; | |
| 495 decoder_buffer_.u.RGBA.stride = | |
| 496 Size().Width() * sizeof(ImageFrame::PixelData); | |
| 497 decoder_buffer_.u.RGBA.size = | |
| 498 decoder_buffer_.u.RGBA.stride * frame_rect.Height(); | |
| 499 decoder_buffer_.is_external_memory = 1; | |
| 500 decoder_ = WebPINewDecoder(&decoder_buffer_); | |
| 501 if (!decoder_) | |
| 502 return SetFailed(); | |
| 503 } | |
| 504 | |
| 505 decoder_buffer_.u.RGBA.rgba = reinterpret_cast<uint8_t*>( | |
| 506 buffer.GetAddr(frame_rect.X(), frame_rect.Y())); | |
| 507 | |
| 508 switch (WebPIUpdate(decoder_, data_bytes, data_size)) { | |
| 509 case VP8_STATUS_OK: | |
| 510 ApplyPostProcessing(frame_index); | |
| 511 buffer.SetHasAlpha((format_flags_ & ALPHA_FLAG) || | |
| 512 frame_background_has_alpha_); | |
| 513 buffer.SetStatus(ImageFrame::kFrameComplete); | |
| 514 ClearDecoder(); | |
| 515 return true; | |
| 516 case VP8_STATUS_SUSPENDED: | |
| 517 if (!IsAllDataReceived() && !FrameIsReceivedAtIndex(frame_index)) { | |
| 518 ApplyPostProcessing(frame_index); | |
| 519 return false; | |
| 520 } | |
| 521 // FALLTHROUGH | |
| 522 default: | |
| 523 Clear(); | |
| 524 return SetFailed(); | |
| 525 } | |
| 526 } | 318 } |
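
Decode() and GetViableReferenceFrameIndex() above work together: the latter picks a fully decoded frame to pass as options.fPriorFrame so that SkCodec can start from an existing canvas instead of replaying the whole dependency chain. The sketch below is a simplified, hypothetical version of that loop (a single SkBitmap destination, no Blink frame cache) showing how kIncompleteInput signals that decoding can resume once more data reaches the stream.

```cpp
#include "third_party/skia/include/codec/SkCodec.h"
#include "third_party/skia/include/core/SkBitmap.h"

// Hypothetical helper: decode one animation frame, optionally reusing a
// previously decoded frame as the starting canvas. Returns true when the
// frame decoded completely; false means either that more data is needed
// (kIncompleteInput) or that decoding failed.
bool DecodeOneFrame(SkCodec* codec, SkBitmap* bitmap, int frame_index,
                    int prior_frame = SkCodec::kNone) {
  SkImageInfo info = codec->getInfo().makeColorType(kN32_SkColorType);
  // With no prior frame, start from a freshly allocated, zero-filled canvas.
  // Otherwise |bitmap| must already hold the pixels of |prior_frame|, and
  // SkCodec only applies this frame's delta on top of them.
  if (prior_frame == SkCodec::kNone &&
      !bitmap->tryAllocPixelsFlags(info, SkBitmap::kZeroPixels_AllocFlag)) {
    return false;
  }

  SkCodec::Options options;
  options.fFrameIndex = frame_index;
  options.fPriorFrame = prior_frame;

  if (codec->startIncrementalDecode(info, bitmap->getPixels(),
                                    bitmap->rowBytes(),
                                    &options) != SkCodec::kSuccess) {
    return false;
  }

  // kIncompleteInput means the stream ran dry; the caller may append data to
  // the underlying stream and call incrementalDecode() again later.
  return codec->incrementalDecode() == SkCodec::kSuccess;
}
```
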
| 527 | 319 |
| 528 } // namespace blink | 320 } // namespace blink |
| OLD | NEW |