
Unified Diff: Source/core/platform/image-decoders/webp/WEBPImageDecoder.cpp

Issue 13980003: Add animation support for WebP images (Closed) Base URL: https://chromium.googlesource.com/chromium/blink.git@master
Patch Set: Create and use zeroFillPixelData() Created 7 years, 5 months ago
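Note (not part of the patch): the change is built around libwebp's demux API. The decoder keeps a WebPDemuxer created with WebPDemuxPartial() and re-parses it as more data arrives, reading the canvas size, format flags, loop count, and per-frame metadata from it. Below is a minimal standalone sketch of that usage pattern with simplified error handling; inspectWebP is an illustrative name, not a function in the patch.

    #include <cstddef>
    #include <cstdint>
    #include "webp/demux.h"

    // Sketch: parse a possibly incomplete WebP stream and walk its frames,
    // mirroring the pattern used by updateDemuxer() / frameBufferAtIndex().
    static void inspectWebP(const uint8_t* bytes, size_t size)
    {
        WebPData input = { bytes, size };
        WebPDemuxState state;
        WebPDemuxer* demux = WebPDemuxPartial(&input, &state);
        if (!demux)
            return; // Not even the header could be parsed yet (or the data is invalid).
        if (state > WEBP_DEMUX_PARSING_HEADER) {
            uint32_t flags = WebPDemuxGetI(demux, WEBP_FF_FORMAT_FLAGS);
            uint32_t canvasWidth = WebPDemuxGetI(demux, WEBP_FF_CANVAS_WIDTH);
            uint32_t canvasHeight = WebPDemuxGetI(demux, WEBP_FF_CANVAS_HEIGHT);
            uint32_t loopCount = WebPDemuxGetI(demux, WEBP_FF_LOOP_COUNT);
            uint32_t frameCount = WebPDemuxGetI(demux, WEBP_FF_FRAME_COUNT);
            (void)flags; (void)canvasWidth; (void)canvasHeight; (void)loopCount;
            // Frames are 1-based in the demux API. Each iterator exposes the frame's
            // byte range (fragment), offsets, duration, and dispose method, which is
            // what the decoder feeds into its incremental decode of each frame.
            for (uint32_t i = 1; i <= frameCount; ++i) {
                WebPIterator frame;
                if (!WebPDemuxGetFrame(demux, i, &frame))
                    break;
                // frame.fragment.bytes / frame.fragment.size, frame.x_offset, ...
                WebPDemuxReleaseIterator(&frame);
            }
        }
        WebPDemuxDelete(demux);
    }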
 /*
  * Copyright (C) 2010 Google Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  *
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright
(...skipping 15 matching lines...)
  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
 #include "config.h"
 #include "core/platform/image-decoders/webp/WEBPImageDecoder.h"
 
 #include "core/platform/PlatformInstrumentation.h"
 
 #ifdef QCMS_WEBP_COLOR_CORRECTION
 #include "qcms.h"
-#include "webp/demux.h"
-#else
-#undef ICCP_FLAG
-#define ICCP_FLAG 0
 #endif
 
-// Backward emulation for earlier versions than 0.1.99.
+#ifdef WEBP_ICC_ANIMATION_SUPPORT
+#include "RuntimeEnabledFeatures.h"
+#include "webp/format_constants.h"
+#endif
+
 #if (WEBP_DECODER_ABI_VERSION < 0x0163)
+// Backward emulation for versions earlier than 0.1.99.
 #define MODE_rgbA MODE_RGBA
 #define MODE_bgrA MODE_BGRA
+#define ALPHA_FLAG 0
+#elif (WEBP_DECODER_ABI_VERSION <= 0x0200)
+// Backward emulation for versions earlier than 0.3.0.
+#define ALPHA_FLAG 0x000010
 #endif
 
 #if CPU(BIG_ENDIAN) || CPU(MIDDLE_ENDIAN)
 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; }
 #elif SK_B32_SHIFT
 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; }
 #else // LITTLE_ENDIAN, output BGRA pixels.
 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_bgrA : MODE_BGRA; }
 #endif
 
 namespace WebCore {
 
 WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption,
     ImageSource::GammaAndColorProfileOption gammaAndColorProfileOption)
     : ImageDecoder(alphaOption, gammaAndColorProfileOption)
     , m_decoder(0)
-    , m_hasAlpha(false)
     , m_formatFlags(0)
+    , m_frameBackgroundHasAlpha(false)
 #ifdef QCMS_WEBP_COLOR_CORRECTION
     , m_haveReadProfile(false)
     , m_transform(0)
+#endif
+#ifdef WEBP_ICC_ANIMATION_SUPPORT
+    , m_demux(0)
+    , m_demuxState(WEBP_DEMUX_PARSING_HEADER)
+    , m_haveAlreadyParsedThisData(false)
+    , m_haveReadAnimationParameters(false)
+    , m_repetitionCount(cAnimationLoopOnce)
     , m_decodedHeight(0)
 #endif
 {
-    WebPInitDecBuffer(&m_decoderBuffer);
 }
 
 WEBPImageDecoder::~WEBPImageDecoder()
 {
     clear();
 }
 
 void WEBPImageDecoder::clear()
 {
 #ifdef QCMS_WEBP_COLOR_CORRECTION
     if (m_transform)
         qcms_transform_release(m_transform);
     m_transform = 0;
 #endif
-    WebPFreeDecBuffer(&m_decoderBuffer);
-    if (m_decoder)
-        WebPIDelete(m_decoder);
+#ifdef WEBP_ICC_ANIMATION_SUPPORT
+    WebPDemuxDelete(m_demux);
+    m_demux = 0;
+#endif
+    clearDecoder();
+}
+
+void WEBPImageDecoder::clearDecoder()
+{
+    WebPIDelete(m_decoder);
     m_decoder = 0;
+#ifdef WEBP_ICC_ANIMATION_SUPPORT
+    m_decodedHeight = 0;
+    m_frameBackgroundHasAlpha = false;
+#endif
 }
 
 bool WEBPImageDecoder::isSizeAvailable()
 {
-    if (!ImageDecoder::isSizeAvailable())
-        decode(true);
-
+    if (!ImageDecoder::isSizeAvailable()) {
+#ifdef WEBP_ICC_ANIMATION_SUPPORT
+        updateDemuxer();
+#else
+        decode(reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size(), true, 0);
+#endif
+    }
     return ImageDecoder::isSizeAvailable();
 }
 
-ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index)
+size_t WEBPImageDecoder::frameCount()
 {
-    if (index)
+#ifdef WEBP_ICC_ANIMATION_SUPPORT
+    if (!updateDemuxer())
         return 0;
-
+#else
     if (m_frameBufferCache.isEmpty()) {
         m_frameBufferCache.resize(1);
         m_frameBufferCache[0].setPremultiplyAlpha(m_premultiplyAlpha);
     }
-
-    ImageFrame& frame = m_frameBufferCache[0];
-    if (frame.status() != ImageFrame::FrameComplete) {
-        PlatformInstrumentation::willDecodeImage("WEBP");
-        decode(false);
-        PlatformInstrumentation::didDecodeImage();
-    }
+#endif
+    return m_frameBufferCache.size();
+}
+
+ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index)
+{
+    if (index >= frameCount())
+        return 0;
+
+    ImageFrame& frame = m_frameBufferCache[index];
+    if (frame.status() == ImageFrame::FrameComplete)
+        return &frame;
+
+#ifdef WEBP_ICC_ANIMATION_SUPPORT
+    if (RuntimeEnabledFeatures::animatedWebPEnabled()) {
+        Vector<size_t> framesToDecode;
+        size_t frameToDecode = index;
+        do {
+            framesToDecode.append(frameToDecode);
+            frameToDecode = m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex();
+        } while (frameToDecode != notFound && m_frameBufferCache[frameToDecode].status() != ImageFrame::FrameComplete);
+
+        ASSERT(m_demux);
+        for (size_t i = framesToDecode.size(); i > 0; --i) {
+            size_t frameIndex = framesToDecode[i - 1];
+            WebPIterator webpFrame;
+            if (!WebPDemuxGetFrame(m_demux, frameIndex + 1, &webpFrame))
+                return 0;
+            if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(webpFrame, frameIndex)) {
+                WebPDemuxReleaseIterator(&webpFrame);
+                return 0;
+            }
+            PlatformInstrumentation::willDecodeImage("WEBP");
+            decode(webpFrame.fragment.bytes, webpFrame.fragment.size, false, frameIndex);
+            PlatformInstrumentation::didDecodeImage();
+            WebPDemuxReleaseIterator(&webpFrame);
+
+            // We need more data to continue decoding.
+            if (m_frameBufferCache[frameIndex].status() != ImageFrame::FrameComplete)
+                break;
+        }
+
+        // It is also a fatal error if all data is received and we have decoded all
+        // frames available but the file is truncated.
+        if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() && m_demux && m_demuxState != WEBP_DEMUX_DONE)
+            setFailed();
+
+        return &frame;
+    }
+#endif
+
+    ASSERT(!index);
+    PlatformInstrumentation::willDecodeImage("WEBP");
+    decode(reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size(), false, index);
+    PlatformInstrumentation::didDecodeImage();
     return &frame;
 }
 
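Note (illustrative, not part of the patch): frameBufferAtIndex() above first walks requiredPreviousFrameIndex() backwards from the requested frame until it reaches an already complete frame (or notFound), then decodes that chain oldest-first so each frame can start from its predecessor's pixels. A condensed sketch of just that ordering, using placeholder types instead of Blink's ImageFrame and Vector:

    #include <cstddef>
    #include <vector>

    // Placeholder stand-in for Blink's notFound.
    static const size_t kNotFound = static_cast<size_t>(-1);

    struct FrameState {
        size_t requiredPreviousFrameIndex; // kNotFound if the frame is independent
        bool complete;                     // already fully decoded and cached
    };

    // Returns the frame indices to decode, oldest first, so that every frame's
    // required predecessor is decoded before the frame itself.
    std::vector<size_t> framesToDecodeInOrder(const std::vector<FrameState>& frames, size_t index)
    {
        std::vector<size_t> chain;
        size_t i = index;
        do {
            chain.push_back(i);
            i = frames[i].requiredPreviousFrameIndex;
        } while (i != kNotFound && !frames[i].complete);
        return std::vector<size_t>(chain.rbegin(), chain.rend());
    }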
+#ifdef WEBP_ICC_ANIMATION_SUPPORT
+
+void WEBPImageDecoder::setData(SharedBuffer* data, bool allDataReceived)
+{
+    if (failed())
+        return;
+
+    ImageDecoder::setData(data, allDataReceived);
+
+    if (m_demuxState != WEBP_DEMUX_DONE)
+        m_haveAlreadyParsedThisData = false;
+}
+
+int WEBPImageDecoder::repetitionCount() const
+{
+    return failed() ? cAnimationLoopOnce : m_repetitionCount;
+}
+
+bool WEBPImageDecoder::frameIsCompleteAtIndex(size_t index) const
+{
+    if (!RuntimeEnabledFeatures::animatedWebPEnabled())
+        return ImageDecoder::frameIsCompleteAtIndex(index);
+    if (!m_demux || m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
+        return false;
+    if (!(m_formatFlags & ANIMATION_FLAG))
+        return ImageDecoder::frameIsCompleteAtIndex(index);
+    bool frameIsLoadedAtIndex = index < m_frameBufferCache.size();
+    return frameIsLoadedAtIndex;
+}
+
+float WEBPImageDecoder::frameDurationAtIndex(size_t index) const
+{
+    return index < m_frameBufferCache.size() ? m_frameBufferCache[index].duration() : 0;
+}
+
+bool WEBPImageDecoder::updateDemuxer()
+{
+    if (m_haveAlreadyParsedThisData)
+        return true;
+
+    m_haveAlreadyParsedThisData = true;
+
+    if (m_data->size() < RIFF_HEADER_SIZE + CHUNK_HEADER_SIZE)
+        return false; // Wait for headers so that WebPDemuxPartial doesn't return null.
+
+    WebPDemuxDelete(m_demux);
+    WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() };
+    m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
+    if (!m_demux)
+        return setFailed();
+
+    if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
+        return false; // Not enough data for parsing canvas width/height yet.
+
+    bool hasAnimation = (m_formatFlags & ANIMATION_FLAG);
+    if (!ImageDecoder::isSizeAvailable()) {
+        m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
+        hasAnimation = (m_formatFlags & ANIMATION_FLAG);
+        if (hasAnimation && !RuntimeEnabledFeatures::animatedWebPEnabled())
+            return setFailed();
+        if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT)))
+            return setFailed();
+    }
+    ASSERT(ImageDecoder::isSizeAvailable());
+    const size_t newFrameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT);
+    if (hasAnimation && !m_haveReadAnimationParameters && newFrameCount) {
+        // As we have parsed at least one frame (even if partially),
+        // we must already have parsed the animation properties.
+        // This is because ANIM chunk always precedes ANMF chunks.
+        m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
+        ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits.
+        if (!m_repetitionCount)
+            m_repetitionCount = cAnimationLoopInfinite;
+        m_haveReadAnimationParameters = true;
+    }
+    const size_t oldFrameCount = m_frameBufferCache.size();
+    if (newFrameCount > oldFrameCount) {
+        m_frameBufferCache.resize(newFrameCount);
+        for (size_t i = oldFrameCount; i < newFrameCount; ++i) {
+            m_frameBufferCache[i].setPremultiplyAlpha(m_premultiplyAlpha);
+            if (!hasAnimation) {
+                ASSERT(!i);
+                m_frameBufferCache[i].setRequiredPreviousFrameIndex(notFound);
+                continue;
+            }
+            WebPIterator animatedFrame;
+            WebPDemuxGetFrame(m_demux, i + 1, &animatedFrame);
+            ASSERT(animatedFrame.complete == 1);
+            m_frameBufferCache[i].setDuration(animatedFrame.duration);
+            m_frameBufferCache[i].setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep);
+            WebPDemuxReleaseIterator(&animatedFrame);
+            m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i));
+        }
+    }
+    return true;
+}
+
+bool WEBPImageDecoder::initFrameBuffer(const WebPIterator& frame, size_t frameIndex)
+{
+    ImageFrame& buffer = m_frameBufferCache[frameIndex];
+    if (buffer.status() != ImageFrame::FrameEmpty) // Already initialized.
+        return true;
+
+    // Initialize the frame rect in our buffer.
+    IntRect frameRect(frame.x_offset, frame.y_offset, frame.width, frame.height);
+
+    // Make sure the frameRect doesn't extend outside the buffer.
+    if (frameRect.maxX() > size().width())
+        frameRect.setWidth(size().width() - frame.x_offset);
+    if (frameRect.maxY() > size().height())
+        frameRect.setHeight(size().height() - frame.y_offset);
+    buffer.setOriginalFrameRect(frameRect);
+
+    const size_t requiredPreviousFrameIndex = buffer.requiredPreviousFrameIndex();
+    if (requiredPreviousFrameIndex == notFound) {
+        // This frame doesn't rely on any previous data.
+        if (!buffer.setSize(size().width(), size().height()))
+            return setFailed();
+        m_frameBackgroundHasAlpha = !frameRect.contains(IntRect(IntPoint(), size()));
+    } else {
+        const ImageFrame& prevBuffer = m_frameBufferCache[requiredPreviousFrameIndex];
+        ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
+
+        // Preserve the last frame as the starting state for this frame.
+        if (!buffer.copyBitmapData(prevBuffer))
+            return setFailed();
+
+        if (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor) {
+            // We want to clear the previous frame to transparent, without
+            // affecting pixels in the image outside of the frame.
+            const IntRect& prevRect = prevBuffer.originalFrameRect();
+            ASSERT(!prevRect.contains(IntRect(IntPoint(), size())));
+            buffer.zeroFillFrameRect(prevRect);
+        }
+
+        m_frameBackgroundHasAlpha = prevBuffer.hasAlpha() || (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor);
+    }
+
+    buffer.setStatus(ImageFrame::FramePartial);
+    // The buffer is transparent outside the decoded area while the image is loading.
+    // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
+    buffer.setHasAlpha(true);
+    return true;
+}
+
+size_t WEBPImageDecoder::clearCacheExceptFrame(size_t clearExceptFrame)
+{
+    // If |clearExceptFrame| has status FrameComplete, we preserve that frame.
+    // Otherwise, we preserve a previous frame with status FrameComplete whose data is required
+    // to decode |clearExceptFrame|, either in initFrameBuffer() or applyPostProcessing().
+    // All other frames can be cleared.
+    while ((clearExceptFrame < m_frameBufferCache.size()) && (m_frameBufferCache[clearExceptFrame].status() != ImageFrame::FrameComplete))
+        clearExceptFrame = m_frameBufferCache[clearExceptFrame].requiredPreviousFrameIndex();
+
+    return ImageDecoder::clearCacheExceptFrame(clearExceptFrame);
+}
+
+void WEBPImageDecoder::clearFrameBuffer(size_t frameIndex)
+{
+    if (m_demux && m_demuxState >= WEBP_DEMUX_PARSED_HEADER && m_frameBufferCache[frameIndex].status() == ImageFrame::FramePartial) {
+        // Clear the decoder state so that this partial frame can be decoded again when requested.
+        clearDecoder();
+    }
+    ImageDecoder::clearFrameBuffer(frameIndex);
+}
+
+#endif // WEBP_ICC_ANIMATION_SUPPORT
+
 #ifdef QCMS_WEBP_COLOR_CORRECTION
 
 void WEBPImageDecoder::createColorTransform(const char* data, size_t size)
 {
     if (m_transform)
         qcms_transform_release(m_transform);
     m_transform = 0;
 
     qcms_profile* deviceProfile = ImageDecoder::qcmsOutputDeviceProfile();
     if (!deviceProfile)
         return;
     qcms_profile* inputProfile = qcms_profile_from_memory(data, size);
     if (!inputProfile)
         return;
 
     // We currently only support color profiles for RGB profiled images.
     ASSERT(icSigRgbData == qcms_profile_get_color_space(inputProfile));
     // The input image pixels are RGBA format.
     qcms_data_type format = QCMS_DATA_RGBA_8;
     // FIXME: Don't force perceptual intent if the image profile contains an intent.
     m_transform = qcms_transform_create(inputProfile, format, deviceProfile, QCMS_DATA_RGBA_8, QCMS_INTENT_PERCEPTUAL);
 
     qcms_profile_release(inputProfile);
 }
 
-void WEBPImageDecoder::readColorProfile(const uint8_t* data, size_t size)
+void WEBPImageDecoder::readColorProfile()
 {
     WebPChunkIterator chunkIterator;
-    WebPData inputData = { data, size };
-    WebPDemuxState state;
-
-    WebPDemuxer* demuxer = WebPDemuxPartial(&inputData, &state);
-    if (!WebPDemuxGetChunk(demuxer, "ICCP", 1, &chunkIterator)) {
+    if (!WebPDemuxGetChunk(m_demux, "ICCP", 1, &chunkIterator)) {
         WebPDemuxReleaseChunkIterator(&chunkIterator);
-        WebPDemuxDelete(demuxer);
         return;
     }
 
     const char* profileData = reinterpret_cast<const char*>(chunkIterator.chunk.bytes);
     size_t profileSize = chunkIterator.chunk.size;
 
     // Only accept RGB color profiles from input class devices.
     bool ignoreProfile = false;
     if (profileSize < ImageDecoder::iccColorProfileHeaderLength)
         ignoreProfile = true;
     else if (!ImageDecoder::rgbColorProfile(profileData, profileSize))
         ignoreProfile = true;
     else if (!ImageDecoder::inputDeviceColorProfile(profileData, profileSize))
         ignoreProfile = true;
 
     if (!ignoreProfile)
         createColorTransform(profileData, profileSize);
 
     WebPDemuxReleaseChunkIterator(&chunkIterator);
-    WebPDemuxDelete(demuxer);
 }
 
-void WEBPImageDecoder::applyColorProfile(const uint8_t* data, size_t dataSize, ImageFrame& buffer)
+#endif // QCMS_WEBP_COLOR_CORRECTION
+
+#ifdef WEBP_ICC_ANIMATION_SUPPORT
+void WEBPImageDecoder::applyPostProcessing(size_t frameIndex)
 {
+    ImageFrame& buffer = m_frameBufferCache[frameIndex];
     int width;
     int decodedHeight;
     if (!WebPIDecGetRGB(m_decoder, &decodedHeight, &width, 0, 0))
         return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062
     if (decodedHeight <= 0)
         return;
 
-    if (!m_haveReadProfile) {
-        readColorProfile(data, dataSize);
-        m_haveReadProfile = true;
+    const IntRect& frameRect = buffer.originalFrameRect();
+    ASSERT_WITH_SECURITY_IMPLICATION(width == frameRect.width());
+    ASSERT_WITH_SECURITY_IMPLICATION(decodedHeight <= frameRect.height());
+    const int left = frameRect.x();
+    const int top = frameRect.y();
+
+#ifdef QCMS_WEBP_COLOR_CORRECTION
+    if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) {
+        if (!m_haveReadProfile) {
+            readColorProfile();
+            m_haveReadProfile = true;
+        }
+        for (int y = m_decodedHeight; y < decodedHeight; ++y) {
+            const int canvasY = top + y;
+            uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(left, canvasY));
+            if (qcms_transform* transform = colorTransform())
+                qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX);
+            uint8_t* pixel = row;
+            for (int x = 0; x < width; ++x, pixel += 4) {
+                const int canvasX = left + x;
+                buffer.setRGBA(canvasX, canvasY, pixel[0], pixel[1], pixel[2], pixel[3]);
+            }
+        }
     }
+#endif // QCMS_WEBP_COLOR_CORRECTION
 
-    ASSERT(width == size().width());
-    ASSERT(decodedHeight <= size().height());
-
-    for (int y = m_decodedHeight; y < decodedHeight; ++y) {
-        uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(0, y));
-        if (qcms_transform* transform = colorTransform())
-            qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX);
-        uint8_t* pixel = row;
-        for (int x = 0; x < width; ++x, pixel += 4)
-            buffer.setRGBA(x, y, pixel[0], pixel[1], pixel[2], pixel[3]);
+    // During the decoding of current frame, we may have set some pixels to be transparent (i.e. alpha < 255).
+    // However, the value of each of these pixels should have been determined by blending it against the value
+    // of that pixel in the previous frame. So, we correct these pixels based on disposal method of the previous
+    // frame and the previous frame buffer.
+    // FIXME: This could be avoided if libwebp decoder had an API that used the previous required frame
+    // to do the alpha-blending by itself.
+    if ((m_formatFlags & ANIMATION_FLAG) && frameIndex) {
+        ImageFrame& prevBuffer = m_frameBufferCache[frameIndex - 1];
+        ImageFrame::FrameDisposalMethod prevMethod = prevBuffer.disposalMethod();
+        if (prevMethod == ImageFrame::DisposeKeep) { // Restore transparent pixels to pixels in previous canvas.
+            ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
+            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
+                const int canvasY = top + y;
+                for (int x = 0; x < width; ++x) {
+                    const int canvasX = left + x;
+                    ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY);
+                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
+                    // Alpha-blending is being implemented in: https://bugs.webkit.org/show_bug.cgi?id=17022
+                    if (!((pixel >> SK_A32_SHIFT) & 0xff)) {
+                        ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY);
+                        pixel = prevPixel;
+                    }
+                }
+            }
+        } else if (prevMethod == ImageFrame::DisposeOverwriteBgcolor && buffer.requiredPreviousFrameIndex() != notFound) {
+            // Note: if the requiredPreviousFrameIndex is |notFound|, there's nothing to do.
+            ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
+            const IntRect& prevRect = prevBuffer.originalFrameRect();
+            // We need to restore transparent pixels to as they were just after the initFrameBuffer() call. That is:
+            // * Transparent if it belongs to prevRect <-- This is a no-op.
+            // * Pixel in the previous canvas otherwise <-- Need to restore.
+            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
+                const int canvasY = top + y;
+                for (int x = 0; x < width; ++x) {
+                    const int canvasX = left + x;
+                    ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY);
+                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
+                    if (!((pixel >> SK_A32_SHIFT) & 0xff) && !prevRect.contains(IntPoint(canvasX, canvasY))) {
+                        ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY);
+                        pixel = prevPixel;
+                    }
+                }
+            }
+        }
     }
 
     m_decodedHeight = decodedHeight;
 }
+#endif // WEBP_ICC_ANIMATION_SUPPORT
 
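Note (illustrative, not part of the patch): the per-pixel correction in applyPostProcessing() above can be read as a single rule. A decoded pixel that is fully transparent shows the previous canvas through, except where the previous frame was disposed to the background color, in which case pixels inside that frame's rect stay transparent. A toy sketch of that rule with placeholder parameter names:

    #include <cstdint>

    // Toy model of the rule in applyPostProcessing(). 'decoded' is the pixel value
    // just produced for the current frame, 'previousCanvas' the same position in the
    // previous frame's canvas, 'insidePreviousRect' whether the position lies inside
    // the previous frame's rect, and 'previousDisposedToBackground' that frame's
    // disposal method. 'alphaShift' stands in for SK_A32_SHIFT.
    uint32_t resolveAnimatedPixel(uint32_t decoded, uint32_t previousCanvas,
        bool insidePreviousRect, bool previousDisposedToBackground, unsigned alphaShift)
    {
        const bool fullyTransparent = !((decoded >> alphaShift) & 0xff);
        if (!fullyTransparent)
            return decoded; // Any alpha > 0 is kept as decoded (no blending yet, see the FIXMEs above).
        if (previousDisposedToBackground && insidePreviousRect)
            return decoded; // The previous frame was cleared here, so transparent is already correct.
        return previousCanvas; // Otherwise the previous canvas shows through.
    }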
-#endif // QCMS_WEBP_COLOR_CORRECTION
-
-bool WEBPImageDecoder::decode(bool onlySize)
+bool WEBPImageDecoder::decode(const uint8_t* dataBytes, size_t dataSize, bool onlySize, size_t frameIndex)
 {
     if (failed())
         return false;
 
-    const uint8_t* dataBytes = reinterpret_cast<const uint8_t*>(m_data->data());
-    const size_t dataSize = m_data->size();
-
     if (!ImageDecoder::isSizeAvailable()) {
         static const size_t imageHeaderSize = 30;
         if (dataSize < imageHeaderSize)
             return false;
         int width, height;
-#ifdef QCMS_WEBP_COLOR_CORRECTION
-        WebPData inputData = { dataBytes, dataSize };
-        WebPDemuxState state;
-        WebPDemuxer* demuxer = WebPDemuxPartial(&inputData, &state);
-        if (!demuxer)
-            return setFailed();
-
-        width = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
-        height = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
-        m_formatFlags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
-        m_hasAlpha = !!(m_formatFlags & ALPHA_FLAG);
-
-        WebPDemuxDelete(demuxer);
-        if (state <= WEBP_DEMUX_PARSING_HEADER)
-            return false;
-#elif (WEBP_DECODER_ABI_VERSION >= 0x0163)
+#if (WEBP_DECODER_ABI_VERSION >= 0x0163)
         WebPBitstreamFeatures features;
         if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
             return setFailed();
         width = features.width;
         height = features.height;
-        m_hasAlpha = features.has_alpha;
+        m_formatFlags = features.has_alpha ? ALPHA_FLAG : 0;
 #else
         // Earlier version won't be able to display WebP files with alpha.
         if (!WebPGetInfo(dataBytes, dataSize, &width, &height))
             return setFailed();
-        m_hasAlpha = false;
 #endif
         if (!setSize(width, height))
             return setFailed();
     }
 
     ASSERT(ImageDecoder::isSizeAvailable());
     if (onlySize)
         return true;
 
-    ASSERT(!m_frameBufferCache.isEmpty());
-    ImageFrame& buffer = m_frameBufferCache[0];
+    ASSERT(m_frameBufferCache.size() > frameIndex);
+    ImageFrame& buffer = m_frameBufferCache[frameIndex];
     ASSERT(buffer.status() != ImageFrame::FrameComplete);
 
     if (buffer.status() == ImageFrame::FrameEmpty) {
         if (!buffer.setSize(size().width(), size().height()))
             return setFailed();
         buffer.setStatus(ImageFrame::FramePartial);
-        buffer.setHasAlpha(m_hasAlpha);
+        // The buffer is transparent outside the decoded area while the image is loading.
+        // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
+        buffer.setHasAlpha(true);
         buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
     }
 
+    const IntRect& frameRect = buffer.originalFrameRect();
     if (!m_decoder) {
-        WEBP_CSP_MODE mode = outputMode(m_hasAlpha);
+        WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG);
         if (!m_premultiplyAlpha)
             mode = outputMode(false);
+#ifdef QCMS_WEBP_COLOR_CORRECTION
         if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
             mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
+#endif
+        WebPInitDecBuffer(&m_decoderBuffer);
         m_decoderBuffer.colorspace = mode;
         m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData);
-        m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * size().height();
+        m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height();
         m_decoderBuffer.is_external_memory = 1;
         m_decoder = WebPINewDecoder(&m_decoderBuffer);
         if (!m_decoder)
             return setFailed();
     }
 
-    m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(0, 0));
+    m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y()));
 
     switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
     case VP8_STATUS_OK:
-        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
-            applyColorProfile(dataBytes, dataSize, buffer);
+        applyPostProcessing(frameIndex);
+        buffer.setHasAlpha((m_formatFlags & ALPHA_FLAG) || m_frameBackgroundHasAlpha);
         buffer.setStatus(ImageFrame::FrameComplete);
-        clear();
+        clearDecoder();
        return true;
     case VP8_STATUS_SUSPENDED:
-        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
-            applyColorProfile(dataBytes, dataSize, buffer);
+        applyPostProcessing(frameIndex);
         return false;
     default:
         clear();
         return setFailed();
     }
 }
 
 } // namespace WebCore