Chromium Code Reviews
| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (C) 2010 Google Inc. All rights reserved. | 2 * Copyright (C) 2010 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
| 6 * are met: | 6 * are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright | 8 * 1. Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright | 10 * 2. Redistributions in binary form must reproduce the above copyright |
| (...skipping 15 matching lines...) | |
| 26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 */ | 27 */ |
| 28 | 28 |
| 29 #include "config.h" | 29 #include "config.h" |
| 30 #include "core/platform/image-decoders/webp/WEBPImageDecoder.h" | 30 #include "core/platform/image-decoders/webp/WEBPImageDecoder.h" |
| 31 | 31 |
| 32 #include "core/platform/PlatformInstrumentation.h" | 32 #include "core/platform/PlatformInstrumentation.h" |
| 33 | 33 |
| 34 #ifdef QCMS_WEBP_COLOR_CORRECTION | 34 #ifdef QCMS_WEBP_COLOR_CORRECTION |
| 35 #include "qcms.h" | 35 #include "qcms.h" |
| 36 #include "webp/demux.h" | |
| 37 #else | |
| 38 #undef ICCP_FLAG | |
| 39 #define ICCP_FLAG 0 | |
| 40 #endif | 36 #endif |
| 41 | 37 |
| 42 // Backward emulation for earlier versions than 0.1.99. | 38 #ifdef WEBP_ICC_ANIMATION_SUPPORT |
| 39 #include "RuntimeEnabledFeatures.h" | |
| 40 #include "webp/format_constants.h" | |
| 41 #endif | |
| 42 | |
| 43 #if (WEBP_DECODER_ABI_VERSION < 0x0163) | 43 #if (WEBP_DECODER_ABI_VERSION < 0x0163) |
| 44 // Backward emulation for versions earlier than 0.1.99. | |
| 44 #define MODE_rgbA MODE_RGBA | 45 #define MODE_rgbA MODE_RGBA |
| 45 #define MODE_bgrA MODE_BGRA | 46 #define MODE_bgrA MODE_BGRA |
| 47 #define ALPHA_FLAG 0 | |
| 48 #elif (WEBP_DECODER_ABI_VERSION <= 0x0200) | |
| 49 // Backward emulation for versions earlier than 0.3.0. | |
| 50 #define ALPHA_FLAG 0x000010 | |
| 46 #endif | 51 #endif |
| 47 | 52 |
| 48 #if CPU(BIG_ENDIAN) || CPU(MIDDLE_ENDIAN) | 53 #if CPU(BIG_ENDIAN) || CPU(MIDDLE_ENDIAN) |
| 49 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; } | 54 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; } |
| 50 #elif SK_B32_SHIFT | 55 #elif SK_B32_SHIFT |
| 51 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; } | 56 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; } |
| 52 #else // LITTLE_ENDIAN, output BGRA pixels. | 57 #else // LITTLE_ENDIAN, output BGRA pixels. |
| 53 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_bgrA : MODE_BGRA; } | 58 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_bgrA : MODE_BGRA; } |
| 54 #endif | 59 #endif |
| 55 | 60 |
| 56 namespace WebCore { | 61 namespace WebCore { |
| 57 | 62 |
| 58 WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption, | 63 WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption, |
| 59 ImageSource::GammaAndColorProfileOption gammaAndColorProfileOption) | 64 ImageSource::GammaAndColorProfileOption gammaAndColorProfileOption) |
| 60 : ImageDecoder(alphaOption, gammaAndColorProfileOption) | 65 : ImageDecoder(alphaOption, gammaAndColorProfileOption) |
| 61 , m_decoder(0) | 66 , m_decoder(0) |
| 62 , m_hasAlpha(false) | |
| 63 , m_formatFlags(0) | 67 , m_formatFlags(0) |
| 64 #ifdef QCMS_WEBP_COLOR_CORRECTION | 68 #ifdef QCMS_WEBP_COLOR_CORRECTION |
| 65 , m_haveReadProfile(false) | 69 , m_haveReadProfile(false) |
| 66 , m_transform(0) | 70 , m_transform(0) |
| 71 #endif | |
| 72 #ifdef WEBP_ICC_ANIMATION_SUPPORT | |
| 73 , m_demux(0) | |
| 74 , m_demuxState(WEBP_DEMUX_PARSING_HEADER) | |
| 75 , m_haveAlreadyParsedThisData(false) | |
| 76 , m_haveReadAnimationParameters(false) | |
| 77 , m_repetitionCount(cAnimationLoopOnce) | |
| 67 , m_decodedHeight(0) | 78 , m_decodedHeight(0) |
| 68 #endif | 79 #endif |
| 69 { | 80 { |
| 70 WebPInitDecBuffer(&m_decoderBuffer); | |
| 71 } | 81 } |
| 72 | 82 |
| 73 WEBPImageDecoder::~WEBPImageDecoder() | 83 WEBPImageDecoder::~WEBPImageDecoder() |
| 74 { | 84 { |
| 75 clear(); | 85 clear(); |
| 76 } | 86 } |
| 77 | 87 |
| 78 void WEBPImageDecoder::clear() | 88 void WEBPImageDecoder::clear() |
| 79 { | 89 { |
| 80 #ifdef QCMS_WEBP_COLOR_CORRECTION | 90 #ifdef QCMS_WEBP_COLOR_CORRECTION |
| 81 if (m_transform) | 91 if (m_transform) |
| 82 qcms_transform_release(m_transform); | 92 qcms_transform_release(m_transform); |
| 83 m_transform = 0; | 93 m_transform = 0; |
| 84 #endif | 94 #endif |
| 85 WebPFreeDecBuffer(&m_decoderBuffer); | 95 #ifdef WEBP_ICC_ANIMATION_SUPPORT |
| 86 if (m_decoder) | 96 WebPDemuxDelete(m_demux); |
| 87 WebPIDelete(m_decoder); | 97 m_demux = 0; |
| 98 #endif | |
| 99 clearDecoder(); | |
| 100 } | |
| 101 | |
| 102 void WEBPImageDecoder::clearDecoder() | |
| 103 { | |
| 104 WebPIDelete(m_decoder); | |
| 88 m_decoder = 0; | 105 m_decoder = 0; |
| 106 #ifdef WEBP_ICC_ANIMATION_SUPPORT | |
| 107 m_decodedHeight = 0; | |
| 108 #endif | |
| 89 } | 109 } |
| 90 | 110 |
| 91 bool WEBPImageDecoder::isSizeAvailable() | 111 bool WEBPImageDecoder::isSizeAvailable() |
| 92 { | 112 { |
| 93 if (!ImageDecoder::isSizeAvailable()) | 113 if (!ImageDecoder::isSizeAvailable()) { |
| 94 decode(true); | 114 #ifdef WEBP_ICC_ANIMATION_SUPPORT |
| 95 | 115 updateDemuxer(); |
| 116 #else | |
| 117 decode(reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size(), true, 0); | |
| 118 #endif | |
| 119 } | |
| 96 return ImageDecoder::isSizeAvailable(); | 120 return ImageDecoder::isSizeAvailable(); |
| 97 } | 121 } |
| 98 | 122 |
| 99 ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index) | 123 size_t WEBPImageDecoder::frameCount() |
| 100 { | 124 { |
| 101 if (index) | 125 #ifdef WEBP_ICC_ANIMATION_SUPPORT |
| 126 if (!updateDemuxer()) | |
| 102 return 0; | 127 return 0; |
| 103 | 128 #else |
| 104 if (m_frameBufferCache.isEmpty()) { | 129 if (m_frameBufferCache.isEmpty()) { |
| 105 m_frameBufferCache.resize(1); | 130 m_frameBufferCache.resize(1); |
| 106 m_frameBufferCache[0].setPremultiplyAlpha(m_premultiplyAlpha); | 131 m_frameBufferCache[0].setPremultiplyAlpha(m_premultiplyAlpha); |
| 107 } | 132 } |
| 108 | 133 #endif |
| 109 ImageFrame& frame = m_frameBufferCache[0]; | 134 return m_frameBufferCache.size(); |
| 110 if (frame.status() != ImageFrame::FrameComplete) { | 135 } |
| 111 PlatformInstrumentation::willDecodeImage("WEBP"); | 136 |
| 112 decode(false); | 137 ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index) |
| 113 PlatformInstrumentation::didDecodeImage(); | 138 { |
| 114 } | 139 if (index >= frameCount()) |
| 140 return 0; | |
| 141 | |
| 142 ImageFrame& frame = m_frameBufferCache[index]; | |
| 143 if (frame.status() == ImageFrame::FrameComplete) | |
| 144 return &frame; | |
| 145 | |
| 146 #ifdef WEBP_ICC_ANIMATION_SUPPORT | |
| 147 if (RuntimeEnabledFeatures::animatedWebPEnabled()) { | |
| 148 Vector<size_t> framesToDecode; | |
| 149 size_t frameToDecode = index; | |
| 150 do { | |
| 151 framesToDecode.append(frameToDecode); | |
| 152 frameToDecode = m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex(); | |
| 153 } while (frameToDecode != notFound && m_frameBufferCache[frameToDecode].status() != ImageFrame::FrameComplete); | |
| 154 | |
| 155 ASSERT(m_demux); | |
| 156 for (size_t i = framesToDecode.size(); i > 0; --i) { | |
| 157 size_t frameIndex = framesToDecode[i - 1]; | |
| 158 WebPIterator webpFrame; | |
| 159 if (!WebPDemuxGetFrame(m_demux, frameIndex + 1, &webpFrame)) | |
| 160 return 0; | |
| 161 if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(webpFrame, frameIndex)) { | |
| 162 WebPDemuxReleaseIterator(&webpFrame); | |
| 163 return 0; | |
| 164 } | |
| 165 PlatformInstrumentation::willDecodeImage("WEBP"); | |
| 166 decode(webpFrame.fragment.bytes, webpFrame.fragment.size, false, frameIndex); | |
| 167 PlatformInstrumentation::didDecodeImage(); | |
| 168 WebPDemuxReleaseIterator(&webpFrame); | |
| 169 | |
| 170 // We need more data to continue decoding. | |
| 171 if (m_frameBufferCache[frameIndex].status() != ImageFrame::FrameComplete) | |
| 172 break; | |
| 173 } | |
| 174 | |
| 175 // It is also a fatal error if all data is received and we have decoded all | |
| 176 // frames available but the file is truncated. | |
| 177 if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() && m_demux && m_demuxState != WEBP_DEMUX_DONE) | |
| 178 setFailed(); | |
| 179 | |
| 180 return &frame; | |
| 181 } | |
| 182 #endif | |
| 183 | |
| 184 ASSERT(!index); | |
| 185 PlatformInstrumentation::willDecodeImage("WEBP"); | |
| 186 decode(reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size(), false, index); | |
| 187 PlatformInstrumentation::didDecodeImage(); | |
| 115 return &frame; | 188 return &frame; |
| 116 } | 189 } |
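The do/while loop above builds the list of frames to decode by walking the frame-dependency chain backwards, then decodes it oldest-first. A minimal standalone sketch of that walk, with illustrative names (Frame, framesToDecode and kNotFound are not Blink types), assuming each frame records whether it is fully decoded and which earlier frame it depends on:

#include <cstddef>
#include <vector>

static const size_t kNotFound = static_cast<size_t>(-1);

struct Frame {
    bool complete;                      // fully decoded?
    size_t requiredPreviousFrameIndex;  // kNotFound if the frame is independent
};

// Returns the frame indices that must be decoded, ordered oldest-first.
std::vector<size_t> framesToDecode(const std::vector<Frame>& frames, size_t index)
{
    std::vector<size_t> toDecode;
    size_t i = index;
    do {
        toDecode.push_back(i);
        i = frames[i].requiredPreviousFrameIndex;
    } while (i != kNotFound && !frames[i].complete);
    // The walk collected frames newest-first; reverse so that every frame's
    // required predecessor is decoded before the frame itself.
    return std::vector<size_t>(toDecode.rbegin(), toDecode.rend());
}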
| 117 | 190 |
| 191 #ifdef WEBP_ICC_ANIMATION_SUPPORT | |
| 192 | |
| 193 void WEBPImageDecoder::setData(SharedBuffer* data, bool allDataReceived) | |
| 194 { | |
| 195 if (failed()) | |
| 196 return; | |
| 197 | |
| 198 ImageDecoder::setData(data, allDataReceived); | |
| 199 | |
| 200 if (m_demuxState != WEBP_DEMUX_DONE) | |
| 201 m_haveAlreadyParsedThisData = false; | |
| 202 } | |
| 203 | |
| 204 int WEBPImageDecoder::repetitionCount() const | |
| 205 { | |
| 206 return failed() ? cAnimationLoopOnce : m_repetitionCount; | |
| 207 } | |
| 208 | |
| 209 bool WEBPImageDecoder::frameIsCompleteAtIndex(size_t index) const | |
| 210 { | |
| 211 if (!RuntimeEnabledFeatures::animatedWebPEnabled()) | |
| 212 return ImageDecoder::frameIsCompleteAtIndex(index); | |
| 213 if (!m_demux || m_demuxState <= WEBP_DEMUX_PARSING_HEADER) | |
| 214 return false; | |
| 215 if (!(m_formatFlags & ANIMATION_FLAG)) | |
| 216 return ImageDecoder::frameIsCompleteAtIndex(index); | |
| 217 const bool frameIsLoadedAtIndex = index < m_frameBufferCache.size(); | |
|
Noel Gordon (2013/06/18 06:26:35): You and constedness, s/const//
urvang (Google) (2013/06/18 09:12:31): Done.
| |
| 218 return frameIsLoadedAtIndex; | |
| 219 } | |
| 220 | |
| 221 float WEBPImageDecoder::frameDurationAtIndex(size_t index) const | |
| 222 { | |
| 223 return index < m_frameBufferCache.size() ? m_frameBufferCache[index].duration() : 0; | |
| 224 } | |
| 225 | |
| 226 bool WEBPImageDecoder::updateDemuxer() | |
| 227 { | |
| 228 if (m_haveAlreadyParsedThisData) | |
| 229 return true; | |
| 230 | |
| 231 m_haveAlreadyParsedThisData = true; | |
| 232 | |
| 233 static const size_t minSizeForDemux = RIFF_HEADER_SIZE + CHUNK_HEADER_SIZE; | |
| 234 if (m_data->size() < minSizeForDemux) | |
| 235 return false; // Wait for headers so that WebPDemuxPartial doesn't return null. | |
| 236 | |
| 237 WebPDemuxDelete(m_demux); | |
| 238 WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() }; | |
| 239 m_demux = WebPDemuxPartial(&inputData, &m_demuxState); | |
| 240 if (!m_demux) | |
| 241 return setFailed(); // Must be a failure as we have at least 'minSizeForDemux' bytes. | |
| 242 | |
| 243 if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER) | |
| 244 return false; // Not enough data for parsing canvas width/height yet. | |
| 245 | |
| 246 bool hasAnimation = (m_formatFlags & ANIMATION_FLAG); | |
| 247 if (!ImageDecoder::isSizeAvailable()) { | |
| 248 m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS); | |
| 249 hasAnimation = (m_formatFlags & ANIMATION_FLAG); | |
| 250 if (hasAnimation && !RuntimeEnabledFeatures::animatedWebPEnabled()) | |
| 251 return setFailed(); | |
| 252 if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT))) | |
| 253 return setFailed(); | |
| 254 } | |
| 255 ASSERT(ImageDecoder::isSizeAvailable()); | |
| 256 const size_t newFrameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT); | |
| 257 if (hasAnimation && !m_haveReadAnimationParameters && (newFrameCount >= 1)) { | |
| 258 // As we have parsed at least one frame (even if partially), | |
| 259 // we must already have parsed the animation properties. | |
| 260 // This is because ANIM chunk always precedes ANMF chunks. | |
| 261 m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT); | |
| 262 ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits. | |
| 263 if (!m_repetitionCount) | |
| 264 m_repetitionCount = cAnimationLoopInfinite; | |
| 265 m_haveReadAnimationParameters = true; | |
| 266 } | |
| 267 const size_t oldFrameCount = m_frameBufferCache.size(); | |
| 268 if (newFrameCount > oldFrameCount) { | |
| 269 m_frameBufferCache.resize(newFrameCount); | |
| 270 for (size_t i = oldFrameCount; i < newFrameCount; ++i) { | |
| 271 m_frameBufferCache[i].setPremultiplyAlpha(m_premultiplyAlpha); | |
| 272 if (!hasAnimation) { | |
| 273 ASSERT(!i); | |
| 274 m_frameBufferCache[i].setRequiredPreviousFrameIndex(notFound); | |
| 275 continue; | |
| 276 } | |
| 277 WebPIterator animatedFrame; | |
| 278 WebPDemuxGetFrame(m_demux, i + 1, &animatedFrame); | |
| 279 ASSERT(animatedFrame.complete == 1); | |
| 280 m_frameBufferCache[i].setDuration(animatedFrame.duration); | |
| 281 m_frameBufferCache[i].setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep); | |
| 282 WebPDemuxReleaseIterator(&animatedFrame); | |
| 283 m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i)); | |
| 284 } | |
| 285 } | |
| 286 return true; | |
| 287 } | |
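updateDemuxer() above relies on the libwebp demux API accepting a partial bitstream: a WebPDemuxer can be re-created from whatever bytes have arrived and queried for anything the headers already describe. A rough standalone sketch of that pattern with error handling trimmed; readAnimationInfo is an illustrative name, not part of the patch:

#include <cstddef>
#include <cstdint>
#include <webp/demux.h>

// Parses whatever has arrived so far; returns true once the header is known.
bool readAnimationInfo(const uint8_t* bytes, size_t size)
{
    WebPData data = { bytes, size };
    WebPDemuxState state;
    WebPDemuxer* demux = WebPDemuxPartial(&data, &state); // tolerates truncated input
    if (!demux)
        return false;
    if (state > WEBP_DEMUX_PARSING_HEADER) {
        uint32_t flags = WebPDemuxGetI(demux, WEBP_FF_FORMAT_FLAGS);
        uint32_t width = WebPDemuxGetI(demux, WEBP_FF_CANVAS_WIDTH);
        uint32_t height = WebPDemuxGetI(demux, WEBP_FF_CANVAS_HEIGHT);
        uint32_t loopCount = WebPDemuxGetI(demux, WEBP_FF_LOOP_COUNT);
        uint32_t frameCount = WebPDemuxGetI(demux, WEBP_FF_FRAME_COUNT); // grows as ANMF chunks arrive
        (void)flags; (void)width; (void)height; (void)loopCount; (void)frameCount;
    }
    WebPDemuxDelete(demux);
    return state > WEBP_DEMUX_PARSING_HEADER;
}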
| 288 | |
| 289 bool WEBPImageDecoder::initFrameBuffer(const WebPIterator& frame, size_t frameIndex) | |
| 290 { | |
| 291 ImageFrame& buffer = m_frameBufferCache[frameIndex]; | |
| 292 if (buffer.status() != ImageFrame::FrameEmpty) // Already initialized. | |
| 293 return true; | |
| 294 | |
| 295 // Initialize the frame rect in our buffer. | |
| 296 IntRect frameRect(frame.x_offset, frame.y_offset, frame.width, frame.height); | |
| 297 | |
| 298 // Make sure the frameRect doesn't extend outside the buffer. | |
| 299 if (frameRect.maxX() > size().width()) | |
| 300 frameRect.setWidth(size().width() - frame.x_offset); | |
| 301 if (frameRect.maxY() > size().height()) | |
| 302 frameRect.setHeight(size().height() - frame.y_offset); | |
| 303 buffer.setOriginalFrameRect(frameRect); | |
| 304 | |
| 305 const size_t requiredPreviousFrameIndex = buffer.requiredPreviousFrameIndex(); | |
| 306 if (requiredPreviousFrameIndex == notFound) { | |
| 307 // This frame doesn't rely on any previous data. | |
| 308 if (!buffer.setSize(size().width(), size().height())) | |
| 309 return setFailed(); | |
| 310 } else { | |
| 311 const ImageFrame& prevBuffer = m_frameBufferCache[requiredPreviousFrameIndex]; | |
| 312 ASSERT(prevBuffer.status() == ImageFrame::FrameComplete); | |
| 313 | |
| 314 // Preserve the last frame as the starting state for this frame. | |
| 315 if (!buffer.copyBitmapData(prevBuffer)) | |
| 316 return setFailed(); | |
| 317 | |
| 318 if (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor) { | |
| 319 // We want to clear the previous frame to transparent, without | |
| 320 // affecting pixels in the image outside of the frame. | |
| 321 const IntRect& prevRect = prevBuffer.originalFrameRect(); | |
| 322 ASSERT(!prevRect.contains(IntRect(IntPoint(), size()))); | |
| 323 for (int y = prevRect.y(); y < prevRect.maxY(); ++y) { | |
| 324 for (int x = prevRect.x(); x < prevRect.maxX(); ++x) | |
| 325 buffer.setRGBA(x, y, 0, 0, 0, 0); | |
| 326 } | |
| 327 } | |
| 328 } | |
| 329 | |
| 330 buffer.setStatus(ImageFrame::FramePartial); | |
| 331 // The buffer is transparent outside the decoded area while the image is loading. | |
| 332 // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded. | |
| 333 buffer.setHasAlpha(true); | |
| 334 return true; | |
| 335 } | |
| 336 | |
| 337 // Note: This overridden method has one subtle difference to the one in | |
| 338 // ImageDecoder class: we need to make sure we don't clear frames from which | |
| 339 // future applyPostProcessing() calls would copy bitmap data (in addition to | |
| 340 // initFrameBuffer() calls). | |
| 341 size_t WEBPImageDecoder::clearCacheExceptFrame(size_t clearExceptFrame) | |
| 342 { | |
| 343 // Don't clear if there are no frames or only one frame. | |
| 344 if (m_frameBufferCache.size() <= 1) | |
| 345 return 0; | |
| 346 | |
| 347 // If |clearExceptFrame| has status FrameComplete, we preserve that frame. | |
| 348 // Otherwise, we preserve a previous frame with status FrameComplete whose data is required to decode |clearExceptFrame|. | |
| 349 // All other frames can be cleared. | |
| 350 while ((clearExceptFrame < m_frameBufferCache.size()) && (m_frameBufferCache[clearExceptFrame].status() != ImageFrame::FrameComplete)) | |
| 351 clearExceptFrame = m_frameBufferCache[clearExceptFrame].requiredPreviousFrameIndex(); | |
| 352 | |
| 353 size_t frameBytesCleared = 0; | |
| 354 for (size_t i = 0; i < m_frameBufferCache.size(); ++i) { | |
| 355 if (i != clearExceptFrame) { | |
| 356 frameBytesCleared += frameBytesAtIndex(i); | |
| 357 clearFrameBuffer(i); | |
| 358 } | |
| 359 } | |
| 360 return frameBytesCleared; | |
| 361 } | |
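The pruning rule in clearCacheExceptFrame() above can be restated compactly: if the frame we want to keep is not yet FrameComplete, preserve instead the fully decoded ancestor its decode depends on; if no such ancestor exists, nothing needs preserving. A sketch using the same illustrative Frame/kNotFound types as the earlier dependency-walk example (not Blink code):

#include <cstddef>
#include <vector>

static const size_t kNotFound = static_cast<size_t>(-1);

struct Frame {
    bool complete;
    size_t requiredPreviousFrameIndex; // kNotFound if independent
};

// Returns the single frame index worth preserving, or kNotFound if none is.
size_t frameToPreserve(const std::vector<Frame>& frames, size_t clearExceptFrame)
{
    // kNotFound (== SIZE_MAX) fails the bounds check, ending the walk with
    // "preserve nothing", exactly like the while loop in the method above.
    while (clearExceptFrame < frames.size() && !frames[clearExceptFrame].complete)
        clearExceptFrame = frames[clearExceptFrame].requiredPreviousFrameIndex;
    return clearExceptFrame < frames.size() ? clearExceptFrame : kNotFound;
}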
| 362 | |
| 363 void WEBPImageDecoder::clearFrameBuffer(size_t frameIndex) | |
| 364 { | |
| 365 if (m_demux && m_demuxState >= WEBP_DEMUX_PARSED_HEADER && m_frameBufferCache[frameIndex].status() == ImageFrame::FramePartial) { | |
| 366 // Clear the decoder state so that this partial frame can be decoded again when requested. | |
| 367 clearDecoder(); | |
| 368 } | |
| 369 ImageDecoder::clearFrameBuffer(frameIndex); | |
| 370 } | |
| 371 | |
| 372 #endif // WEBP_ICC_ANIMATION_SUPPORT | |
| 373 | |
| 118 #ifdef QCMS_WEBP_COLOR_CORRECTION | 374 #ifdef QCMS_WEBP_COLOR_CORRECTION |
| 119 | 375 |
| 120 void WEBPImageDecoder::createColorTransform(const char* data, size_t size) | 376 void WEBPImageDecoder::createColorTransform(const char* data, size_t size) |
| 121 { | 377 { |
| 122 if (m_transform) | 378 if (m_transform) |
| 123 qcms_transform_release(m_transform); | 379 qcms_transform_release(m_transform); |
| 124 m_transform = 0; | 380 m_transform = 0; |
| 125 | 381 |
| 126 qcms_profile* deviceProfile = ImageDecoder::qcmsOutputDeviceProfile(); | 382 qcms_profile* deviceProfile = ImageDecoder::qcmsOutputDeviceProfile(); |
| 127 if (!deviceProfile) | 383 if (!deviceProfile) |
| 128 return; | 384 return; |
| 129 qcms_profile* inputProfile = qcms_profile_from_memory(data, size); | 385 qcms_profile* inputProfile = qcms_profile_from_memory(data, size); |
| 130 if (!inputProfile) | 386 if (!inputProfile) |
| 131 return; | 387 return; |
| 132 | 388 |
| 133 // We currently only support color profiles for RGB profiled images. | 389 // We currently only support color profiles for RGB profiled images. |
| 134 ASSERT(icSigRgbData == qcms_profile_get_color_space(inputProfile)); | 390 ASSERT(icSigRgbData == qcms_profile_get_color_space(inputProfile)); |
| 135 // The input image pixels are RGBA format. | 391 // The input image pixels are RGBA format. |
| 136 qcms_data_type format = QCMS_DATA_RGBA_8; | 392 qcms_data_type format = QCMS_DATA_RGBA_8; |
| 137 // FIXME: Don't force perceptual intent if the image profile contains an intent. | 393 // FIXME: Don't force perceptual intent if the image profile contains an intent. |
| 138 m_transform = qcms_transform_create(inputProfile, format, deviceProfile, QCMS_DATA_RGBA_8, QCMS_INTENT_PERCEPTUAL); | 394 m_transform = qcms_transform_create(inputProfile, format, deviceProfile, QCMS_DATA_RGBA_8, QCMS_INTENT_PERCEPTUAL); |
| 139 | 395 |
| 140 qcms_profile_release(inputProfile); | 396 qcms_profile_release(inputProfile); |
| 141 } | 397 } |
| 142 | 398 |
| 143 void WEBPImageDecoder::readColorProfile(const uint8_t* data, size_t size) | 399 void WEBPImageDecoder::readColorProfile() |
| 144 { | 400 { |
| 145 WebPChunkIterator chunkIterator; | 401 WebPChunkIterator chunkIterator; |
| 146 WebPData inputData = { data, size }; | 402 if (!WebPDemuxGetChunk(m_demux, "ICCP", 1, &chunkIterator)) { |
| 147 WebPDemuxState state; | |
| 148 | |
| 149 WebPDemuxer* demuxer = WebPDemuxPartial(&inputData, &state); | |
| 150 if (!WebPDemuxGetChunk(demuxer, "ICCP", 1, &chunkIterator)) { | |
| 151 WebPDemuxReleaseChunkIterator(&chunkIterator); | 403 WebPDemuxReleaseChunkIterator(&chunkIterator); |
| 152 WebPDemuxDelete(demuxer); | |
| 153 return; | 404 return; |
| 154 } | 405 } |
| 155 | 406 |
| 156 const char* profileData = reinterpret_cast<const char*>(chunkIterator.chunk.bytes); | 407 const char* profileData = reinterpret_cast<const char*>(chunkIterator.chunk.bytes); |
| 157 size_t profileSize = chunkIterator.chunk.size; | 408 size_t profileSize = chunkIterator.chunk.size; |
| 158 | 409 |
| 159 // Only accept RGB color profiles from input class devices. | 410 // Only accept RGB color profiles from input class devices. |
| 160 bool ignoreProfile = false; | 411 bool ignoreProfile = false; |
| 161 if (profileSize < ImageDecoder::iccColorProfileHeaderLength) | 412 if (profileSize < ImageDecoder::iccColorProfileHeaderLength) |
| 162 ignoreProfile = true; | 413 ignoreProfile = true; |
| 163 else if (!ImageDecoder::rgbColorProfile(profileData, profileSize)) | 414 else if (!ImageDecoder::rgbColorProfile(profileData, profileSize)) |
| 164 ignoreProfile = true; | 415 ignoreProfile = true; |
| 165 else if (!ImageDecoder::inputDeviceColorProfile(profileData, profileSize)) | 416 else if (!ImageDecoder::inputDeviceColorProfile(profileData, profileSize)) |
| 166 ignoreProfile = true; | 417 ignoreProfile = true; |
| 167 | 418 |
| 168 if (!ignoreProfile) | 419 if (!ignoreProfile) |
| 169 createColorTransform(profileData, profileSize); | 420 createColorTransform(profileData, profileSize); |
| 170 | 421 |
| 171 WebPDemuxReleaseChunkIterator(&chunkIterator); | 422 WebPDemuxReleaseChunkIterator(&chunkIterator); |
| 172 WebPDemuxDelete(demuxer); | |
| 173 } | 423 } |
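readColorProfile() above now reads the ICC payload from the decoder's long-lived demuxer instead of building a throwaway one per call. A sketch of just the chunk lookup in isolation; extractIccProfile is an illustrative helper, and the returned pointer aliases the demuxer's input buffer, so it is only valid while that data stays alive:

#include <cstddef>
#include <cstdint>
#include <webp/demux.h>

// Fetches the first ICCP chunk, if any, without copying its bytes.
bool extractIccProfile(const WebPDemuxer* demux, const uint8_t** profile, size_t* profileSize)
{
    WebPChunkIterator it;
    if (!WebPDemuxGetChunk(demux, "ICCP", 1, &it))
        return false;
    *profile = it.chunk.bytes; // points into the data handed to the demuxer
    *profileSize = it.chunk.size;
    WebPDemuxReleaseChunkIterator(&it);
    return true;
}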
| 174 | 424 |
| 175 void WEBPImageDecoder::applyColorProfile(const uint8_t* data, size_t dataSize, ImageFrame& buffer) | 425 #endif // QCMS_WEBP_COLOR_CORRECTION |
| 426 | |
| 427 #ifdef WEBP_ICC_ANIMATION_SUPPORT | |
| 428 void WEBPImageDecoder::applyPostProcessing(size_t frameIndex) | |
| 176 { | 429 { |
| 430 ImageFrame& buffer = m_frameBufferCache[frameIndex]; | |
| 177 int width; | 431 int width; |
| 178 int decodedHeight; | 432 int decodedHeight; |
| 179 if (!WebPIDecGetRGB(m_decoder, &decodedHeight, &width, 0, 0)) | 433 if (!WebPIDecGetRGB(m_decoder, &decodedHeight, &width, 0, 0)) |
| 180 return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062 | 434 return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062 |
| 181 if (decodedHeight <= 0) | 435 if (decodedHeight <= 0) |
| 182 return; | 436 return; |
| 183 | 437 |
| 184 if (!m_haveReadProfile) { | 438 const IntRect& frameRect = buffer.originalFrameRect(); |
| 185 readColorProfile(data, dataSize); | 439 ASSERT_WITH_SECURITY_IMPLICATION(width == frameRect.width()); |
| 186 m_haveReadProfile = true; | 440 ASSERT_WITH_SECURITY_IMPLICATION(decodedHeight <= frameRect.height()); |
| 441 const int left = frameRect.x(); | |
| 442 const int top = frameRect.y(); | |
| 443 | |
| 444 #ifdef QCMS_WEBP_COLOR_CORRECTION | |
| 445 if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) { | |
| 446 if (!m_haveReadProfile) { | |
| 447 readColorProfile(); | |
| 448 m_haveReadProfile = true; | |
| 449 } | |
| 450 for (int y = m_decodedHeight; y < decodedHeight; ++y) { | |
| 451 const int canvasY = top + y; | |
| 452 uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(left, canvasY)); | |
| 453 if (qcms_transform* transform = colorTransform()) | |
| 454 qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX); | |
| 455 uint8_t* pixel = row; | |
| 456 for (int x = 0; x < width; ++x, pixel += 4) { | |
| 457 const int canvasX = left + x; | |
| 458 buffer.setRGBA(canvasX, canvasY, pixel[0], pixel[1], pixel[2], pixel[3]); | |
| 459 } | |
| 460 } | |
| 187 } | 461 } |
| 462 #endif // QCMS_WEBP_COLOR_CORRECTION | |
| 188 | 463 |
| 189 ASSERT(width == size().width()); | 464 // During the decoding of current frame, we may have set some pixels to be transparent (i.e. alpha < 255). |
| 190 ASSERT(decodedHeight <= size().height()); | 465 // However, the value of each of these pixels should have been determined by blending it against the value |
| 191 | 466 // of that pixel in the previous frame. So, we correct these pixels based on disposal method of the previous |
| 192 for (int y = m_decodedHeight; y < decodedHeight; ++y) { | 467 // frame and the previous frame buffer. |
| 193 uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(0, y)); | 468 if ((m_formatFlags & ANIMATION_FLAG) && frameIndex) { |
| 194 if (qcms_transform* transform = colorTransform()) | 469 ImageFrame& prevBuffer = m_frameBufferCache[frameIndex - 1]; |
| 195 qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX); | 470 ImageFrame::FrameDisposalMethod prevMethod = prevBuffer.disposalMethod(); |
| 196 uint8_t* pixel = row; | 471 if (prevMethod == ImageFrame::DisposeKeep) { // Restore transparent pixels to pixels in previous canvas. |
| 197 for (int x = 0; x < width; ++x, pixel += 4) | 472 ASSERT(prevBuffer.status() == ImageFrame::FrameComplete); |
| 198 buffer.setRGBA(x, y, pixel[0], pixel[1], pixel[2], pixel[3]); | 473 for (int y = m_decodedHeight; y < decodedHeight; ++y) { |
| 474 const int canvasY = top + y; | |
| 475 for (int x = 0; x < width; ++x) { | |
| 476 const int canvasX = left + x; | |
| 477 ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY); | |
| 478 // FIXME: Use alpha-blending when alpha is between 0 and 255. | |
| 479 // Alpha-blending is being implemented in: https://bugs.webkit.org/show_bug.cgi?id=17022 | |
| 480 if (!((pixel >> SK_A32_SHIFT) & 0xff)) { | |
| 481 ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY); | |
| 482 pixel = prevPixel; | |
| 483 } | |
| 484 } | |
| 485 } | |
| 486 } else if (prevMethod == ImageFrame::DisposeOverwriteBgcolor && buffer.requiredPreviousFrameIndex() != notFound) { | |
| 487 // Note: if the requiredPreviousFrameIndex is |notFound|, there's nothing to do. | |
| 488 ASSERT(prevBuffer.status() == ImageFrame::FrameComplete); | |
| 489 const IntRect& prevRect = prevBuffer.originalFrameRect(); | |
| 490 // We need to restore transparent pixels to as they were just after initFrame() call. That is: | |
| 491 // * Transparent if it belongs to prevRect <-- This is a no-op. | |
| 492 // * Pixel in the previous canvas otherwise <-- Need to restore. | |
| 493 for (int y = m_decodedHeight; y < decodedHeight; ++y) { | |
| 494 const int canvasY = top + y; | |
| 495 for (int x = 0; x < width; ++x) { | |
| 496 const int canvasX = left + x; | |
| 497 ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY); | |
| 498 // FIXME: Use alpha-blending when alpha is between 0 and 255. | |
| 499 if (!((pixel >> SK_A32_SHIFT) & 0xff) && !prevRect.contains(IntPoint(canvasX, canvasY))) { | |
| 500 ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY); | |
| 501 pixel = prevPixel; | |
| 502 } | |
| 503 } | |
| 504 } | |
| 505 } | |
| 199 } | 506 } |
| 200 | 507 |
| 201 m_decodedHeight = decodedHeight; | 508 m_decodedHeight = decodedHeight; |
| 202 } | 509 } |
| 510 #endif // WEBP_ICC_ANIMATION_SUPPORT | |
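The two nested pixel loops in applyPostProcessing() above apply one per-pixel rule; condensed, it reads as below. This is a restatement for clarity rather than the patch's code: PixelData is assumed to be a 32-bit word with the alpha byte at bit 24 (i.e. SK_A32_SHIFT == 24), full alpha blending is deliberately omitted to match the FIXME, and the DisposeOverwriteBgcolor case with no required previous frame (where nothing is restored) is left out.

#include <cstdint>

typedef uint32_t PixelData; // ARGB word, alpha in the top byte (assumes SK_A32_SHIFT == 24)

inline PixelData correctedPixel(PixelData decoded, PixelData previousCanvas,
                                bool prevDisposedToBackground, bool insidePrevRect)
{
    const bool fullyTransparent = !((decoded >> 24) & 0xff);
    if (!fullyTransparent)
        return decoded; // opaque or partially transparent pixels are kept as decoded
    if (prevDisposedToBackground && insidePrevRect)
        return decoded; // stays transparent, matching the cleared background
    return previousCanvas; // otherwise restore the pixel from the previous canvas
}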
| 203 | 511 |
| 204 #endif // QCMS_WEBP_COLOR_CORRECTION | 512 bool WEBPImageDecoder::decode(const uint8_t* dataBytes, size_t dataSize, bool onlySize, size_t frameIndex) |
| 205 | |
| 206 bool WEBPImageDecoder::decode(bool onlySize) | |
| 207 { | 513 { |
| 208 if (failed()) | 514 if (failed()) |
| 209 return false; | 515 return false; |
| 210 | 516 |
| 211 const uint8_t* dataBytes = reinterpret_cast<const uint8_t*>(m_data->data()); | |
| 212 const size_t dataSize = m_data->size(); | |
| 213 | |
| 214 if (!ImageDecoder::isSizeAvailable()) { | 517 if (!ImageDecoder::isSizeAvailable()) { |
| 215 static const size_t imageHeaderSize = 30; | 518 static const size_t imageHeaderSize = 30; |
| 216 if (dataSize < imageHeaderSize) | 519 if (dataSize < imageHeaderSize) |
| 217 return false; | 520 return false; |
| 218 int width, height; | 521 int width, height; |
| 219 #ifdef QCMS_WEBP_COLOR_CORRECTION | 522 #if (WEBP_DECODER_ABI_VERSION >= 0x0163) |
| 220 WebPData inputData = { dataBytes, dataSize }; | |
| 221 WebPDemuxState state; | |
| 222 WebPDemuxer* demuxer = WebPDemuxPartial(&inputData, &state); | |
| 223 if (!demuxer) | |
| 224 return setFailed(); | |
| 225 | |
| 226 width = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH); | |
| 227 height = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT); | |
| 228 m_formatFlags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS); | |
| 229 m_hasAlpha = !!(m_formatFlags & ALPHA_FLAG); | |
| 230 | |
| 231 WebPDemuxDelete(demuxer); | |
| 232 if (state <= WEBP_DEMUX_PARSING_HEADER) | |
| 233 return false; | |
| 234 #elif (WEBP_DECODER_ABI_VERSION >= 0x0163) | |
| 235 WebPBitstreamFeatures features; | 523 WebPBitstreamFeatures features; |
| 236 if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK) | 524 if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK) |
| 237 return setFailed(); | 525 return setFailed(); |
| 238 width = features.width; | 526 width = features.width; |
| 239 height = features.height; | 527 height = features.height; |
| 240 m_hasAlpha = features.has_alpha; | 528 m_formatFlags = features.has_alpha ? ALPHA_FLAG : 0; |
| 241 #else | 529 #else |
| 242 // Earlier version won't be able to display WebP files with alpha. | 530 // Earlier version won't be able to display WebP files with alpha. |
| 243 if (!WebPGetInfo(dataBytes, dataSize, &width, &height)) | 531 if (!WebPGetInfo(dataBytes, dataSize, &width, &height)) |
| 244 return setFailed(); | 532 return setFailed(); |
| 245 m_hasAlpha = false; | |
| 246 #endif | 533 #endif |
| 247 if (!setSize(width, height)) | 534 if (!setSize(width, height)) |
| 248 return setFailed(); | 535 return setFailed(); |
| 249 } | 536 } |
| 250 | 537 |
| 251 ASSERT(ImageDecoder::isSizeAvailable()); | 538 ASSERT(ImageDecoder::isSizeAvailable()); |
| 252 if (onlySize) | 539 if (onlySize) |
| 253 return true; | 540 return true; |
| 254 | 541 |
| 255 ASSERT(!m_frameBufferCache.isEmpty()); | 542 ASSERT(m_frameBufferCache.size() > frameIndex); |
| 256 ImageFrame& buffer = m_frameBufferCache[0]; | 543 ImageFrame& buffer = m_frameBufferCache[frameIndex]; |
| 257 ASSERT(buffer.status() != ImageFrame::FrameComplete); | 544 ASSERT(buffer.status() != ImageFrame::FrameComplete); |
| 258 | 545 |
| 259 if (buffer.status() == ImageFrame::FrameEmpty) { | 546 if (buffer.status() == ImageFrame::FrameEmpty) { |
| 260 if (!buffer.setSize(size().width(), size().height())) | 547 if (!buffer.setSize(size().width(), size().height())) |
| 261 return setFailed(); | 548 return setFailed(); |
| 262 buffer.setStatus(ImageFrame::FramePartial); | 549 buffer.setStatus(ImageFrame::FramePartial); |
| 263 buffer.setHasAlpha(m_hasAlpha); | 550 // The buffer is transparent outside the decoded area while the image is loading. |
| 551 // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded. | |
| 552 buffer.setHasAlpha(true); | |
| 264 buffer.setOriginalFrameRect(IntRect(IntPoint(), size())); | 553 buffer.setOriginalFrameRect(IntRect(IntPoint(), size())); |
| 265 } | 554 } |
| 266 | 555 |
| 556 const IntRect& frameRect = buffer.originalFrameRect(); | |
| 267 if (!m_decoder) { | 557 if (!m_decoder) { |
| 268 WEBP_CSP_MODE mode = outputMode(m_hasAlpha); | 558 WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG); |
| 269 if (!m_premultiplyAlpha) | 559 if (!m_premultiplyAlpha) |
| 270 mode = outputMode(false); | 560 mode = outputMode(false); |
| 561 #ifdef QCMS_WEBP_COLOR_CORRECTION | |
| 271 if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) | 562 if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) |
| 272 mode = MODE_RGBA; // Decode to RGBA for input to libqcms. | 563 mode = MODE_RGBA; // Decode to RGBA for input to libqcms. |
| 564 #endif | |
| 565 WebPInitDecBuffer(&m_decoderBuffer); | |
| 273 m_decoderBuffer.colorspace = mode; | 566 m_decoderBuffer.colorspace = mode; |
| 274 m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData); | 567 m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData); |
| 275 m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * size().height(); | 568 m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height(); |
| 276 m_decoderBuffer.is_external_memory = 1; | 569 m_decoderBuffer.is_external_memory = 1; |
| 277 m_decoder = WebPINewDecoder(&m_decoderBuffer); | 570 m_decoder = WebPINewDecoder(&m_decoderBuffer); |
| 278 if (!m_decoder) | 571 if (!m_decoder) |
| 279 return setFailed(); | 572 return setFailed(); |
| 280 } | 573 } |
| 281 | 574 |
| 282 m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(0, 0)); | 575 m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y())); |
| 283 | 576 |
| 284 switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) { | 577 switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) { |
| 285 case VP8_STATUS_OK: | 578 case VP8_STATUS_OK: |
| 286 if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) | 579 applyPostProcessing(frameIndex); |
| 287 applyColorProfile(dataBytes, dataSize, buffer); | 580 buffer.setHasAlpha(m_formatFlags & ALPHA_FLAG); |
| 288 buffer.setStatus(ImageFrame::FrameComplete); | 581 buffer.setStatus(ImageFrame::FrameComplete); |
| 289 clear(); | 582 clearDecoder(); |
| 290 return true; | 583 return true; |
| 291 case VP8_STATUS_SUSPENDED: | 584 case VP8_STATUS_SUSPENDED: |
| 292 if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) | 585 applyPostProcessing(frameIndex); |
| 293 applyColorProfile(dataBytes, dataSize, buffer); | |
| 294 return false; | 586 return false; |
| 295 default: | 587 default: |
| 296 clear(); | 588 clear(); |
| 297 return setFailed(); | 589 return setFailed(); |
| 298 } | 590 } |
| 299 } | 591 } |
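decode() above drives libwebp's incremental decoder into an externally owned pixel buffer and keeps the decoder alive across calls so more data can be appended. A simplified single-shot sketch of the same pattern (decodeIncrementally and its buffer handling are illustrative, not the patch's interface):

#include <cstddef>
#include <cstdint>
#include <vector>
#include <webp/decode.h>

// Returns the number of fully decoded rows so far, or -1 on a hard error.
int decodeIncrementally(const uint8_t* data, size_t size, int width, int height,
                        std::vector<uint8_t>& pixels /* width * height * 4 bytes */)
{
    if (pixels.size() < static_cast<size_t>(width) * height * 4)
        return -1;

    WebPDecBuffer output;
    WebPInitDecBuffer(&output);
    output.colorspace = MODE_RGBA;
    output.u.RGBA.rgba = pixels.data();
    output.u.RGBA.stride = width * 4;
    output.u.RGBA.size = pixels.size();
    output.is_external_memory = 1; // decode straight into our buffer

    WebPIDecoder* idec = WebPINewDecoder(&output);
    if (!idec)
        return -1;

    // Feed everything received so far; VP8_STATUS_SUSPENDED simply means more
    // input is needed, and the rows decoded so far are already usable.
    const VP8StatusCode status = WebPIUpdate(idec, data, size);
    int decodedRows = 0;
    if (status == VP8_STATUS_OK || status == VP8_STATUS_SUSPENDED)
        WebPIDecGetRGB(idec, &decodedRows, 0, 0, 0);

    WebPIDelete(idec);
    return (status == VP8_STATUS_OK || status == VP8_STATUS_SUSPENDED) ? decodedRows : -1;
}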
| 300 | 592 |
| 301 } // namespace WebCore | 593 } // namespace WebCore |