OLD | NEW |
| (Empty) |
1 /* | |
2 * Copyright (C) 2010 Google Inc. All rights reserved. | |
3 * | |
4 * Redistribution and use in source and binary forms, with or without | |
5 * modification, are permitted provided that the following conditions | |
6 * are met: | |
7 * | |
8 * 1. Redistributions of source code must retain the above copyright | |
9 * notice, this list of conditions and the following disclaimer. | |
10 * 2. Redistributions in binary form must reproduce the above copyright | |
11 * notice, this list of conditions and the following disclaimer in the | |
12 * documentation and/or other materials provided with the distribution. | |
13 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of | |
14 * its contributors may be used to endorse or promote products derived | |
15 * from this software without specific prior written permission. | |
16 * | |
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY | |
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED | |
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | |
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY | |
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | |
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | |
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND | |
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF | |
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
27 */ | |
28 | |
29 #include "config.h" | |
30 #include "core/platform/image-decoders/webp/WEBPImageDecoder.h" | |
31 | |
32 #include "RuntimeEnabledFeatures.h" | |
33 #include "platform/PlatformInstrumentation.h" | |
34 | |
35 #if USE(QCMSLIB) | |
36 #include "qcms.h" | |
37 #endif | |
38 | |
39 #if CPU(BIG_ENDIAN) || CPU(MIDDLE_ENDIAN) | |
40 #error Blink assumes a little-endian target. | |
41 #endif | |
42 | |
// Selects the libwebp output colorspace matching Skia's channel order.
// The lowercase-'A' modes (MODE_rgbA / MODE_bgrA) are libwebp's
// premultiplied-alpha variants, used when the frame has alpha.
#if SK_B32_SHIFT // Output little-endian RGBA pixels (Android).
inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; }
#else // Output little-endian BGRA pixels.
inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_bgrA : MODE_BGRA; }
#endif
48 | |
49 namespace WebCore { | |
50 | |
// Constructs the decoder in its idle state. The libwebp demuxer and the
// incremental decoder are created lazily; see updateDemuxer() and decode().
WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption,
    ImageSource::GammaAndColorProfileOption gammaAndColorProfileOption,
    size_t maxDecodedBytes)
    : ImageDecoder(alphaOption, gammaAndColorProfileOption, maxDecodedBytes)
    , m_decoder(0) // Incremental libwebp decoder, created on first decode().
    , m_formatFlags(0)
    , m_frameBackgroundHasAlpha(false)
#if USE(QCMSLIB)
    , m_haveReadProfile(false)
    , m_transform(0)
#endif
    , m_demux(0) // WebP container demuxer, created in updateDemuxer().
    , m_demuxState(WEBP_DEMUX_PARSING_HEADER)
    , m_haveAlreadyParsedThisData(false)
    , m_haveReadAnimationParameters(false)
    , m_repetitionCount(cAnimationLoopOnce)
    , m_decodedHeight(0) // Rows of the current frame decoded so far.
{
}
70 | |
WEBPImageDecoder::~WEBPImageDecoder()
{
    clear(); // Releases the demuxer, the incremental decoder and any color transform.
}
75 | |
// Releases all libwebp state owned by this decoder: the QCMS color
// transform (when built with QCMSLIB), the container demuxer, and the
// incremental frame decoder (via clearDecoder()).
void WEBPImageDecoder::clear()
{
#if USE(QCMSLIB)
    if (m_transform)
        qcms_transform_release(m_transform);
    m_transform = 0;
#endif
    WebPDemuxDelete(m_demux);
    m_demux = 0;
    clearDecoder();
}
87 | |
// Releases only the incremental decoder and its per-frame bookkeeping;
// the demuxer (m_demux) is deliberately left intact so a partial frame
// can be decoded again later.
void WEBPImageDecoder::clearDecoder()
{
    WebPIDelete(m_decoder);
    m_decoder = 0;
    m_decodedHeight = 0;
    m_frameBackgroundHasAlpha = false;
}
95 | |
96 bool WEBPImageDecoder::isSizeAvailable() | |
97 { | |
98 if (!ImageDecoder::isSizeAvailable()) | |
99 updateDemuxer(); | |
100 | |
101 return ImageDecoder::isSizeAvailable(); | |
102 } | |
103 | |
104 size_t WEBPImageDecoder::frameCount() | |
105 { | |
106 if (!updateDemuxer()) | |
107 return 0; | |
108 | |
109 return m_frameBufferCache.size(); | |
110 } | |
111 | |
// Returns the (possibly partially) decoded frame at |index|, decoding it
// on demand. For animated WebP this first decodes, oldest-first, every
// not-yet-complete frame the requested frame transitively depends on.
// Returns 0 on failure or when |index| is out of range.
ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index)
{
    if (index >= frameCount())
        return 0;

    ImageFrame& frame = m_frameBufferCache[index];
    if (frame.status() == ImageFrame::FrameComplete)
        return &frame;

    if (RuntimeEnabledFeatures::animatedWebPEnabled()) {
        // Walk the dependency chain backwards, collecting every frame that
        // still needs decoding before |index| can be produced.
        Vector<size_t> framesToDecode;
        size_t frameToDecode = index;
        do {
            framesToDecode.append(frameToDecode);
            frameToDecode = m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex();
        } while (frameToDecode != kNotFound && m_frameBufferCache[frameToDecode].status() != ImageFrame::FrameComplete);

        ASSERT(m_demux);
        // Decode in forward order: framesToDecode holds newest-first.
        for (size_t i = framesToDecode.size(); i > 0; --i) {
            size_t frameIndex = framesToDecode[i - 1];
            if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(frameIndex))
                return 0;
            WebPIterator webpFrame;
            // libwebp frame numbers are 1-based.
            if (!WebPDemuxGetFrame(m_demux, frameIndex + 1, &webpFrame))
                return 0;
            PlatformInstrumentation::willDecodeImage("WEBP");
            decode(webpFrame.fragment.bytes, webpFrame.fragment.size, false, frameIndex);
            PlatformInstrumentation::didDecodeImage();
            WebPDemuxReleaseIterator(&webpFrame);

            if (failed())
                return 0;

            // We need more data to continue decoding.
            if (m_frameBufferCache[frameIndex].status() != ImageFrame::FrameComplete)
                break;
        }

        // It is also a fatal error if all data is received and we have decoded all
        // frames available but the file is truncated.
        if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() && m_demux && m_demuxState != WEBP_DEMUX_DONE)
            setFailed();

        frame.notifyBitmapIfPixelsChanged();
        return &frame;
    }

    // Still (non-animated) image: there is exactly one frame, decoded
    // directly from the full data buffer.
    ASSERT(!index);
    PlatformInstrumentation::willDecodeImage("WEBP");
    decode(reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size(), false, index);
    PlatformInstrumentation::didDecodeImage();
    return failed() ? 0 : &frame;
}
165 | |
166 void WEBPImageDecoder::setData(SharedBuffer* data, bool allDataReceived) | |
167 { | |
168 if (failed()) | |
169 return; | |
170 ImageDecoder::setData(data, allDataReceived); | |
171 m_haveAlreadyParsedThisData = false; | |
172 } | |
173 | |
174 int WEBPImageDecoder::repetitionCount() const | |
175 { | |
176 return failed() ? cAnimationLoopOnce : m_repetitionCount; | |
177 } | |
178 | |
179 bool WEBPImageDecoder::frameIsCompleteAtIndex(size_t index) const | |
180 { | |
181 if (!RuntimeEnabledFeatures::animatedWebPEnabled()) | |
182 return ImageDecoder::frameIsCompleteAtIndex(index); | |
183 if (!m_demux || m_demuxState <= WEBP_DEMUX_PARSING_HEADER) | |
184 return false; | |
185 if (!(m_formatFlags & ANIMATION_FLAG)) | |
186 return ImageDecoder::frameIsCompleteAtIndex(index); | |
187 bool frameIsLoadedAtIndex = index < m_frameBufferCache.size(); | |
188 return frameIsLoadedAtIndex; | |
189 } | |
190 | |
191 float WEBPImageDecoder::frameDurationAtIndex(size_t index) const | |
192 { | |
193 return index < m_frameBufferCache.size() ? m_frameBufferCache[index].duratio
n() : 0; | |
194 } | |
195 | |
// Re-parses the WebP container with WebPDemuxPartial and refreshes the
// decoder's view of it: canvas size, format flags, animation parameters,
// and per-frame metadata for newly discovered frames.
// Returns true once the canvas size is known, false while more data is
// needed, and setFailed() (false) on malformed input.
bool WEBPImageDecoder::updateDemuxer()
{
    if (failed())
        return false;

    // Parsing the same bytes twice is pointless; setData() resets this flag.
    if (m_haveAlreadyParsedThisData)
        return true;

    m_haveAlreadyParsedThisData = true;

    const unsigned webpHeaderSize = 20;
    if (m_data->size() < webpHeaderSize)
        return false; // Wait for headers so that WebPDemuxPartial doesn't return null.

    // The demuxer reads directly out of inputData, so it is rebuilt from
    // scratch every time the underlying buffer grows.
    WebPDemuxDelete(m_demux);
    WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() };
    m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
    if (!m_demux)
        return setFailed();

    if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
        return false; // Not enough data for parsing canvas width/height yet.

    bool hasAnimation = (m_formatFlags & ANIMATION_FLAG);
    if (!ImageDecoder::isSizeAvailable()) {
        m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
        hasAnimation = (m_formatFlags & ANIMATION_FLAG);
        // Animated WebP is behind a runtime flag; reject such files when disabled.
        if (hasAnimation && !RuntimeEnabledFeatures::animatedWebPEnabled())
            return setFailed();
        if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT)))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    const size_t newFrameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT);
    if (hasAnimation && !m_haveReadAnimationParameters && newFrameCount) {
        // As we have parsed at least one frame (even if partially),
        // we must already have parsed the animation properties.
        // This is because ANIM chunk always precedes ANMF chunks.
        m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
        ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits.
        if (!m_repetitionCount)
            m_repetitionCount = cAnimationLoopInfinite; // A stored loop count of 0 means "loop forever".
        m_haveReadAnimationParameters = true;
    }

    const size_t oldFrameCount = m_frameBufferCache.size();
    if (newFrameCount > oldFrameCount) {
        // Populate metadata for every frame this parse newly discovered.
        m_frameBufferCache.resize(newFrameCount);
        for (size_t i = oldFrameCount; i < newFrameCount; ++i) {
            m_frameBufferCache[i].setPremultiplyAlpha(m_premultiplyAlpha);
            if (!hasAnimation) {
                // A still image has exactly one frame with no dependencies.
                ASSERT(!i);
                m_frameBufferCache[i].setRequiredPreviousFrameIndex(kNotFound);
                continue;
            }
            WebPIterator animatedFrame;
            WebPDemuxGetFrame(m_demux, i + 1, &animatedFrame); // libwebp frame numbers are 1-based.
            ASSERT(animatedFrame.complete == 1);
            m_frameBufferCache[i].setDuration(animatedFrame.duration);
            m_frameBufferCache[i].setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep);
            m_frameBufferCache[i].setAlphaBlendSource(animatedFrame.blend_method == WEBP_MUX_BLEND ? ImageFrame::BlendAtopPreviousFrame : ImageFrame::BlendAtopBgcolor);
            IntRect frameRect(animatedFrame.x_offset, animatedFrame.y_offset, animatedFrame.width, animatedFrame.height);
            // Make sure the frameRect doesn't extend outside the buffer.
            if (frameRect.maxX() > size().width())
                frameRect.setWidth(size().width() - animatedFrame.x_offset);
            if (frameRect.maxY() > size().height())
                frameRect.setHeight(size().height() - animatedFrame.y_offset);
            m_frameBufferCache[i].setOriginalFrameRect(frameRect);
            m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i, !animatedFrame.has_alpha));
            WebPDemuxReleaseIterator(&animatedFrame);
        }
    }

    return true;
}
272 | |
// Prepares the pixel buffer for an animation frame before its data is
// decoded: either a fresh full-canvas buffer (no dependency), or a copy of
// the required previous frame with that frame's disposal applied.
// Returns setFailed() (false) on allocation failure; returns true
// immediately if the buffer was already initialized.
bool WEBPImageDecoder::initFrameBuffer(size_t frameIndex)
{
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    if (buffer.status() != ImageFrame::FrameEmpty) // Already initialized.
        return true;

    const size_t requiredPreviousFrameIndex = buffer.requiredPreviousFrameIndex();
    if (requiredPreviousFrameIndex == kNotFound) {
        // This frame doesn't rely on any previous data.
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        // A frame rect smaller than the canvas leaves transparent background pixels.
        m_frameBackgroundHasAlpha = !buffer.originalFrameRect().contains(IntRect(IntPoint(), size()));
    } else {
        const ImageFrame& prevBuffer = m_frameBufferCache[requiredPreviousFrameIndex];
        ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);

        // Preserve the last frame as the starting state for this frame.
        if (!buffer.copyBitmapData(prevBuffer))
            return setFailed();

        if (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor) {
            // We want to clear the previous frame to transparent, without
            // affecting pixels in the image outside of the frame.
            const IntRect& prevRect = prevBuffer.originalFrameRect();
            ASSERT(!prevRect.contains(IntRect(IntPoint(), size())));
            buffer.zeroFillFrameRect(prevRect);
        }

        m_frameBackgroundHasAlpha = prevBuffer.hasAlpha() || (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor);
    }

    buffer.setStatus(ImageFrame::FramePartial);
    // The buffer is transparent outside the decoded area while the image is loading.
    // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
    buffer.setHasAlpha(true);
    return true;
}
310 | |
311 size_t WEBPImageDecoder::clearCacheExceptFrame(size_t clearExceptFrame) | |
312 { | |
313 // If |clearExceptFrame| has status FrameComplete, we preserve that frame. | |
314 // Otherwise, we preserve a previous frame with status FrameComplete whose d
ata is required | |
315 // to decode |clearExceptFrame|, either in initFrameBuffer() or ApplyPostPro
cessing(). | |
316 // All other frames can be cleared. | |
317 while ((clearExceptFrame < m_frameBufferCache.size()) && (m_frameBufferCache
[clearExceptFrame].status() != ImageFrame::FrameComplete)) | |
318 clearExceptFrame = m_frameBufferCache[clearExceptFrame].requiredPrevious
FrameIndex(); | |
319 | |
320 return ImageDecoder::clearCacheExceptFrame(clearExceptFrame); | |
321 } | |
322 | |
323 void WEBPImageDecoder::clearFrameBuffer(size_t frameIndex) | |
324 { | |
325 if (m_demux && m_demuxState >= WEBP_DEMUX_PARSED_HEADER && m_frameBufferCach
e[frameIndex].status() == ImageFrame::FramePartial) { | |
326 // Clear the decoder state so that this partial frame can be decoded aga
in when requested. | |
327 clearDecoder(); | |
328 } | |
329 ImageDecoder::clearFrameBuffer(frameIndex); | |
330 } | |
331 | |
332 #if USE(QCMSLIB) | |
333 | |
334 void WEBPImageDecoder::createColorTransform(const char* data, size_t size) | |
335 { | |
336 if (m_transform) | |
337 qcms_transform_release(m_transform); | |
338 m_transform = 0; | |
339 | |
340 qcms_profile* deviceProfile = ImageDecoder::qcmsOutputDeviceProfile(); | |
341 if (!deviceProfile) | |
342 return; | |
343 qcms_profile* inputProfile = qcms_profile_from_memory(data, size); | |
344 if (!inputProfile) | |
345 return; | |
346 | |
347 // We currently only support color profiles for RGB profiled images. | |
348 ASSERT(icSigRgbData == qcms_profile_get_color_space(inputProfile)); | |
349 // The input image pixels are RGBA format. | |
350 qcms_data_type format = QCMS_DATA_RGBA_8; | |
351 // FIXME: Don't force perceptual intent if the image profile contains an int
ent. | |
352 m_transform = qcms_transform_create(inputProfile, format, deviceProfile, QCM
S_DATA_RGBA_8, QCMS_INTENT_PERCEPTUAL); | |
353 | |
354 qcms_profile_release(inputProfile); | |
355 } | |
356 | |
357 void WEBPImageDecoder::readColorProfile() | |
358 { | |
359 WebPChunkIterator chunkIterator; | |
360 if (!WebPDemuxGetChunk(m_demux, "ICCP", 1, &chunkIterator)) { | |
361 WebPDemuxReleaseChunkIterator(&chunkIterator); | |
362 return; | |
363 } | |
364 | |
365 const char* profileData = reinterpret_cast<const char*>(chunkIterator.chunk.
bytes); | |
366 size_t profileSize = chunkIterator.chunk.size; | |
367 | |
368 // Only accept RGB color profiles from input class devices. | |
369 bool ignoreProfile = false; | |
370 if (profileSize < ImageDecoder::iccColorProfileHeaderLength) | |
371 ignoreProfile = true; | |
372 else if (!ImageDecoder::rgbColorProfile(profileData, profileSize)) | |
373 ignoreProfile = true; | |
374 else if (!ImageDecoder::inputDeviceColorProfile(profileData, profileSize)) | |
375 ignoreProfile = true; | |
376 | |
377 if (!ignoreProfile) | |
378 createColorTransform(profileData, profileSize); | |
379 | |
380 WebPDemuxReleaseChunkIterator(&chunkIterator); | |
381 } | |
382 | |
383 #endif // USE(QCMSLIB) | |
384 | |
// Post-processes the rows decoded since the last call (rows
// [m_decodedHeight, decodedHeight)): applies the ICC color transform when
// present, and for animated frames blended atop the previous frame,
// restores pixels libwebp left transparent from the previous canvas.
void WEBPImageDecoder::applyPostProcessing(size_t frameIndex)
{
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    int width;
    int decodedHeight;
    if (!WebPIDecGetRGB(m_decoder, &decodedHeight, &width, 0, 0))
        return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062
    if (decodedHeight <= 0)
        return;

    const IntRect& frameRect = buffer.originalFrameRect();
    ASSERT_WITH_SECURITY_IMPLICATION(width == frameRect.width());
    ASSERT_WITH_SECURITY_IMPLICATION(decodedHeight <= frameRect.height());
    // Frame-local coordinates are offset by the frame's position on the canvas.
    const int left = frameRect.x();
    const int top = frameRect.y();

#if USE(QCMSLIB)
    if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) {
        // The profile is read lazily, once, on the first rows of the first pass.
        if (!m_haveReadProfile) {
            readColorProfile();
            m_haveReadProfile = true;
        }
        // Color-correct only the newly decoded rows, in place, then rewrite
        // them through setRGBA so premultiplication is applied as configured.
        for (int y = m_decodedHeight; y < decodedHeight; ++y) {
            const int canvasY = top + y;
            uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(left, canvasY));
            if (qcms_transform* transform = colorTransform())
                qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX);
            uint8_t* pixel = row;
            for (int x = 0; x < width; ++x, pixel += 4) {
                const int canvasX = left + x;
                buffer.setRGBA(canvasX, canvasY, pixel[0], pixel[1], pixel[2], pixel[3]);
            }
        }
    }
#endif // USE(QCMSLIB)

    // During the decoding of current frame, we may have set some pixels to be transparent (i.e. alpha < 255).
    // However, the value of each of these pixels should have been determined by blending it against the value
    // of that pixel in the previous frame if alpha blend source was 'BlendAtopPreviousFrame'. So, we correct these
    // pixels based on disposal method of the previous frame and the previous frame buffer.
    // FIXME: This could be avoided if libwebp decoder had an API that used the previous required frame
    // to do the alpha-blending by itself.
    if ((m_formatFlags & ANIMATION_FLAG) && frameIndex && buffer.alphaBlendSource() == ImageFrame::BlendAtopPreviousFrame && buffer.requiredPreviousFrameIndex() != kNotFound) {
        // NOTE(review): the previous buffer is taken as frameIndex - 1 rather
        // than buffer.requiredPreviousFrameIndex(); confirm the two always
        // coincide when this branch is reached.
        ImageFrame& prevBuffer = m_frameBufferCache[frameIndex - 1];
        ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
        ImageFrame::DisposalMethod prevDisposalMethod = prevBuffer.disposalMethod();
        if (prevDisposalMethod == ImageFrame::DisposeKeep) { // Restore transparent pixels to pixels in previous canvas.
            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
                const int canvasY = top + y;
                for (int x = 0; x < width; ++x) {
                    const int canvasX = left + x;
                    ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY);
                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
                    // Alpha-blending is being implemented in: https://bugs.webkit.org/show_bug.cgi?id=17022
                    if (!((pixel >> SK_A32_SHIFT) & 0xff)) {
                        ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY);
                        pixel = prevPixel;
                    }
                }
            }
        } else if (prevDisposalMethod == ImageFrame::DisposeOverwriteBgcolor) {
            const IntRect& prevRect = prevBuffer.originalFrameRect();
            // We need to restore transparent pixels to as they were just after initFrame() call. That is:
            //   * Transparent if it belongs to prevRect <-- This is a no-op.
            //   * Pixel in the previous canvas otherwise <-- Need to restore.
            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
                const int canvasY = top + y;
                for (int x = 0; x < width; ++x) {
                    const int canvasX = left + x;
                    ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY);
                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
                    if (!((pixel >> SK_A32_SHIFT) & 0xff) && !prevRect.contains(IntPoint(canvasX, canvasY))) {
                        ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY);
                        pixel = prevPixel;
                    }
                }
            }
        }
    }

    // Remember how far we got so the next pass only touches new rows.
    m_decodedHeight = decodedHeight;
    buffer.setPixelsChanged(true);
}
468 | |
// Feeds |dataBytes|/|dataSize| to the incremental libwebp decoder for the
// frame at |frameIndex|. When |onlySize| is true, stops after the image
// dimensions are known. Returns true when the frame completes, false when
// more data is needed; calls setFailed() on a bitstream error.
bool WEBPImageDecoder::decode(const uint8_t* dataBytes, size_t dataSize, bool onlySize, size_t frameIndex)
{
    if (failed())
        return false;

    if (!ImageDecoder::isSizeAvailable()) {
        // Minimum bytes needed for WebPGetFeatures to read the bitstream header.
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;
        int width, height;
        WebPBitstreamFeatures features;
        if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
            return setFailed();
        width = features.width;
        height = features.height;
        m_formatFlags = features.has_alpha ? ALPHA_FLAG : 0;
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(m_frameBufferCache.size() > frameIndex);
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    // For the non-animated path the buffer was not prepared by
    // initFrameBuffer(); allocate a full-canvas buffer here.
    if (buffer.status() == ImageFrame::FrameEmpty) {
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        // The buffer is transparent outside the decoded area while the image is loading.
        // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
        buffer.setHasAlpha(true);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    const IntRect& frameRect = buffer.originalFrameRect();
    if (!m_decoder) {
        WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG);
        if (!m_premultiplyAlpha)
            mode = outputMode(false);
#if USE(QCMSLIB)
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
#endif
        // Decode straight into the frame's own pixel memory: the external
        // buffer covers the frame rect, one canvas-width row at a time.
        WebPInitDecBuffer(&m_decoderBuffer);
        m_decoderBuffer.colorspace = mode;
        m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData);
        m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height();
        m_decoderBuffer.is_external_memory = 1;
        m_decoder = WebPINewDecoder(&m_decoderBuffer);
        if (!m_decoder)
            return setFailed();
    }

    // The frame buffer may have been reallocated since the last call, so
    // refresh the output pointer on every pass.
    m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y()));

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        // Frame fully decoded: post-process the remaining rows and finalize.
        applyPostProcessing(frameIndex);
        buffer.setHasAlpha((m_formatFlags & ALPHA_FLAG) || m_frameBackgroundHasAlpha);
        buffer.setStatus(ImageFrame::FrameComplete);
        clearDecoder();
        return true;
    case VP8_STATUS_SUSPENDED:
        // Suspended is only acceptable while more data may still arrive;
        // otherwise the stream is truncated and treated as an error below.
        if (!isAllDataReceived() && !frameIsCompleteAtIndex(frameIndex)) {
            applyPostProcessing(frameIndex);
            return false;
        }
        // FALLTHROUGH
    default:
        clear();
        return setFailed();
    }
}
546 | |
547 } // namespace WebCore | |
OLD | NEW |