Chromium Code Reviews
| Index: Source/platform/image-decoders/jpeg/JPEGImageDecoder.cpp |
| diff --git a/Source/platform/image-decoders/jpeg/JPEGImageDecoder.cpp b/Source/platform/image-decoders/jpeg/JPEGImageDecoder.cpp |
| index 90db0a868ff04a89f4320bbc189088933ca789e9..448fc4364a59fcd4b1e04fbbb4dd665a16e3e926 100644 |
| --- a/Source/platform/image-decoders/jpeg/JPEGImageDecoder.cpp |
| +++ b/Source/platform/image-decoders/jpeg/JPEGImageDecoder.cpp |
| @@ -242,6 +242,14 @@ static void readColorProfile(jpeg_decompress_struct* info, ColorProfile& colorPr |
| } |
| #endif |
| +static void computeUVSize(const jpeg_decompress_struct* info, int* width, int* height) |
| +{ |
|
Noel Gordon
2014/07/25 15:55:01
How about this: make this routine return an IntSize?
sugoi1
2014/07/25 16:56:48
Done.
|
| + int h = info->cur_comp_info[0]->h_samp_factor; |
|
Noel Gordon
2014/07/25 15:55:01
h -> u?
sugoi1
2014/07/25 16:56:48
In this case:
h -> horizontal
v -> vertical
|
| + int v = info->cur_comp_info[0]->v_samp_factor; |
| + *width = (info->output_width + h - 1) / h; |
| + *height = (info->output_height + v - 1) / v; |
| +} |
| + |
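For readers following the "return an IntSize" thread above: the follow-up patch set is not included in this snapshot, so the exact shape is an assumption, but a minimal sketch of that revision (replacing the two out-parameters) would look something like this:

```cpp
// Editorial sketch only; the signature that actually landed may differ.
static IntSize computeUVSize(const jpeg_decompress_struct* info)
{
    // The chroma planes are subsampled by the luma component's sampling factors.
    int h = info->cur_comp_info[0]->h_samp_factor;
    int v = info->cur_comp_info[0]->v_samp_factor;
    return IntSize((info->output_width + h - 1) / h, (info->output_height + v - 1) / v);
}
```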
| class JPEGImageReader { |
| WTF_MAKE_FAST_ALLOCATED; |
| public: |
| @@ -347,10 +355,35 @@ public: |
| return false; // I/O suspension. |
| switch (m_info.jpeg_color_space) { |
| + case JCS_YCbCr: |
| + // libjpeg can convert YCbCr image pixels to RGB. |
| + m_info.out_color_space = rgbOutputColorSpace(); |
| + if (m_decoder->acceleratedYUVDecoding() |
|
Alpha Left Google
2014/07/24 23:30:56
What about by default output RGB and only change output color space to YCbCr when YUV decoding is possible?
sugoi1
2014/07/25 02:55:13
Isn't that what's already happening here? RGB is set as the default and only changed to YCbCr when all the YUV conditions are met.
Noel Gordon
2014/07/25 15:55:01
The code following here could be moved to its own function.
sugoi1
2014/07/25 16:56:48
Already done in current patch
|
| + && (DCTSIZE == 8) |
| + && (m_info.num_components == 3) |
| + && (m_info.scale_denom <= 8) |
| + && (m_info.cur_comp_info[1]->h_samp_factor == 1) |
| + && (m_info.cur_comp_info[1]->v_samp_factor == 1) |
| + && (m_info.cur_comp_info[2]->h_samp_factor == 1) |
| + && (m_info.cur_comp_info[2]->v_samp_factor == 1)) { |
| + int h = m_info.cur_comp_info[0]->h_samp_factor; |
| + int v = m_info.cur_comp_info[0]->v_samp_factor; |
| + // Only set YUV mode if the format is supported |
|
Alpha Left Google
2014/07/24 22:07:27
nit: period at the end of sentence.
sugoi1
2014/07/24 23:16:37
Acknowledged.
|
| + // 4:4:4 : (h == 1) && (v == 1) |
| + // 4:4:0 : (h == 1) && (v == 2) |
| + // 4:2:2 : (h == 2) && (v == 1) |
| + // 4:2:0 : (h == 2) && (v == 2) |
| + // 4:1:1 : (h == 4) && (v == 1) |
| + // 4:1:0 : (h == 4) && (v == 2) |
| + if (((h == 1) || (h == 2) || (h == 4)) && ((v == 1) || (v == 2))) { |
| + m_info.out_color_space = JCS_YCbCr; |
| + m_info.raw_data_out = TRUE; |
| + } |
| + } |
| + break; |
| case JCS_GRAYSCALE: |
| case JCS_RGB: |
| - case JCS_YCbCr: |
| - // libjpeg can convert GRAYSCALE and YCbCr image pixels to RGB. |
| + // libjpeg can convert GRAYSCALE image pixels to RGB. |
| m_info.out_color_space = rgbOutputColorSpace(); |
| #if defined(TURBO_JPEG_RGB_SWIZZLE) |
| if (m_info.saw_JFIF_marker) |
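Per the thread above, pulling the YCbCr suitability test into its own function is already done in the current patch set, which is not shown here. A sketch of what such a helper could look like, mirroring the conditions in the hunk above (the name and the handling of the acceleratedYUVDecoding() flag are assumptions):

```cpp
// Sketch of the factored-out suitability check; the caller would still gate
// this on m_decoder->acceleratedYUVDecoding().
static bool canDecodeToYUV(const jpeg_decompress_struct* info)
{
    if (DCTSIZE != 8 || info->num_components != 3 || info->scale_denom > 8)
        return false;
    // The chroma planes must not be oversampled relative to luma.
    if (info->cur_comp_info[1]->h_samp_factor != 1 || info->cur_comp_info[1]->v_samp_factor != 1
        || info->cur_comp_info[2]->h_samp_factor != 1 || info->cur_comp_info[2]->v_samp_factor != 1)
        return false;
    // Accept only the common subsampling layouts:
    // 4:4:4, 4:4:0, 4:2:2, 4:2:0, 4:1:1, 4:1:0.
    int h = info->cur_comp_info[0]->h_samp_factor;
    int v = info->cur_comp_info[0]->v_samp_factor;
    return (h == 1 || h == 2 || h == 4) && (v == 1 || v == 2);
}
```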
| @@ -427,7 +460,7 @@ public: |
| // to jpeg_start_compress(). |
| // FIXME: note that some output color spaces do not need the samples |
| // buffer. Remove this allocation for those color spaces. |
| - m_samples = (*m_info.mem->alloc_sarray)(reinterpret_cast<j_common_ptr>(&m_info), JPOOL_IMAGE, m_info.output_width * 4, 1); |
| + m_samples = (*m_info.mem->alloc_sarray)(reinterpret_cast<j_common_ptr>(&m_info), JPOOL_IMAGE, m_info.output_width * 4, m_info.out_color_space == JCS_YCbCr ? 2 : 1); |
| // Start decompressor. |
| if (!jpeg_start_decompress(&m_info)) |
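One row of output_width * 4 samples is still enough for the RGB/CMYK paths; the extra row in YCbCr mode appears to match how outputRawData() (later in this file) carves m_samples into yLastRow, uLastRow, vLastRow and dummyRow at offsets of 2 * output_width bytes each, i.e. up to 8 * output_width bytes in total, which is exactly the 2 * (output_width * 4) samples allocated here.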
| @@ -597,6 +630,7 @@ JPEGImageDecoder::JPEGImageDecoder(ImageSource::AlphaOption alphaOption, |
| size_t maxDecodedBytes) |
| : ImageDecoder(alphaOption, gammaAndColorProfileOption, maxDecodedBytes) |
| , m_hasColorProfile(false) |
| + , m_acceleratedYUVDecoding(false) |
| { |
| } |
| @@ -629,6 +663,20 @@ void JPEGImageDecoder::setDecodedSize(unsigned width, unsigned height) |
| m_decodedSize = IntSize(width, height); |
| } |
| +IntSize JPEGImageDecoder::decodedSize(int component) const |
| +{ |
|
Noel Gordon
2014/07/25 15:55:01
Did you consider using a separate function that exposes the U/V size instead of a component parameter?
sugoi1
2014/07/25 16:56:48
Already done in current patch.
|
| + if (((component == 1) || (component == 2)) && m_reader.get()) { // Asking for U or V |
| + const jpeg_decompress_struct* info = m_reader->info(); |
| + if (info && (info->out_color_space == JCS_YCbCr)) { |
| + int w, h; |
| + computeUVSize(info, &w, &h); |
| + return IntSize(w, h); |
| + } |
| + } |
| + |
| + return m_decodedSize; |
| +} |
| + |
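As the reply above says, a separate accessor landed in the current patch set, which is not part of this snapshot. A hedged sketch of what it might look like, reusing computeUVSize() as it appears in this patch (the name decodedUVSize() is an assumption):

```cpp
// Hypothetical separate accessor for the chroma plane size (name assumed).
IntSize JPEGImageDecoder::decodedUVSize() const
{
    if (m_reader.get()) {
        const jpeg_decompress_struct* info = m_reader->info();
        if (info && info->out_color_space == JCS_YCbCr) {
            int w, h;
            computeUVSize(info, &w, &h);
            return IntSize(w, h);
        }
    }
    return m_decodedSize;
}
```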
| unsigned JPEGImageDecoder::desiredScaleNumerator() const |
| { |
| size_t originalBytes = size().width() * size().height() * 4; |
| @@ -644,6 +692,15 @@ unsigned JPEGImageDecoder::desiredScaleNumerator() const |
| return scaleNumerator; |
| } |
| +bool JPEGImageDecoder::doAcceleratedYUVDecoding() |
| +{ |
|
Noel Gordon
2014/07/25 15:55:01
Would yuvDecode() or decodeToYUV() better describe what this function does?
sugoi1
2014/07/25 16:56:48
Accelerated has already been removed from the name
|
| + setAcceleratedYUVDecoding(true); |
| + PlatformInstrumentation::willDecodeImage("JPEG"); |
| + decode(false); |
| + PlatformInstrumentation::didDecodeImage(); |
| + return !failed(); |
| +} |
| + |
| ImageFrame* JPEGImageDecoder::frameBufferAtIndex(size_t index) |
| { |
| if (index) |
| @@ -671,28 +728,37 @@ bool JPEGImageDecoder::setFailed() |
| return ImageDecoder::setFailed(); |
| } |
| +void JPEGImageDecoder::setDecodingBuffers(OwnPtr<DecodingBuffers>& decodingBuffers) |
| +{ |
| + m_decodingBuffers = decodingBuffers.release(); |
| +} |
| + |
| template <J_COLOR_SPACE colorSpace> void setPixel(ImageFrame& buffer, ImageFrame::PixelData* pixel, JSAMPARRAY samples, int column) |
| { |
| - JSAMPLE* jsample = *samples + column * (colorSpace == JCS_RGB ? 3 : 4); |
| + ASSERT_NOT_REACHED(); |
| +} |
| - switch (colorSpace) { |
| - case JCS_RGB: |
| - buffer.setRGBARaw(pixel, jsample[0], jsample[1], jsample[2], 255); |
| - break; |
| - case JCS_CMYK: |
| - // Source is 'Inverted CMYK', output is RGB. |
| - // See: http://www.easyrgb.com/math.php?MATH=M12#text12 |
| - // Or: http://www.ilkeratalay.com/colorspacesfaq.php#rgb |
| - // From CMYK to CMY: |
| - // X = X * (1 - K ) + K [for X = C, M, or Y] |
| - // Thus, from Inverted CMYK to CMY is: |
| - // X = (1-iX) * (1 - (1-iK)) + (1-iK) => 1 - iX*iK |
| - // From CMY (0..1) to RGB (0..1): |
| - // R = 1 - C => 1 - (1 - iC*iK) => iC*iK [G and B similar] |
| - unsigned k = jsample[3]; |
| - buffer.setRGBARaw(pixel, jsample[0] * k / 255, jsample[1] * k / 255, jsample[2] * k / 255, 255); |
| - break; |
| - } |
| +template <> void setPixel<JCS_RGB>(ImageFrame& buffer, ImageFrame::PixelData* pixel, JSAMPARRAY samples, int column) |
|
Noel Gordon
2014/07/25 15:55:01
The setPixel() code from lines 736-762 changed, I'm not sure why that was needed.
sugoi1
2014/07/25 16:56:48
This is cleanup. Using template arguments in conditionals left dead branches in every instantiation, so I split setPixel() into explicit specializations.
|
| +{ |
| + JSAMPLE* jsample = *samples + column * 3; |
| + buffer.setRGBARaw(pixel, jsample[0], jsample[1], jsample[2], 255); |
| +} |
| + |
| +template <> void setPixel<JCS_CMYK>(ImageFrame& buffer, ImageFrame::PixelData* pixel, JSAMPARRAY samples, int column) |
| +{ |
| + JSAMPLE* jsample = *samples + column * 4; |
| + |
| + // Source is 'Inverted CMYK', output is RGB. |
| + // See: http://www.easyrgb.com/math.php?MATH=M12#text12 |
| + // Or: http://www.ilkeratalay.com/colorspacesfaq.php#rgb |
| + // From CMYK to CMY: |
| + // X = X * (1 - K ) + K [for X = C, M, or Y] |
| + // Thus, from Inverted CMYK to CMY is: |
| + // X = (1-iX) * (1 - (1-iK)) + (1-iK) => 1 - iX*iK |
| + // From CMY (0..1) to RGB (0..1): |
| + // R = 1 - C => 1 - (1 - iC*iK) => iC*iK [G and B similar] |
| + unsigned k = jsample[3]; |
| + buffer.setRGBARaw(pixel, jsample[0] * k / 255, jsample[1] * k / 255, jsample[2] * k / 255, 255); |
| } |
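For context on the exchange above: the old setPixel() switched on the template parameter colorSpace at run time and relied on the compiler to strip the dead branch; with the explicit specializations above, each instantiation of outputRows<colorSpace> (below) binds directly to the matching specialization at compile time, and the unspecialized primary template's ASSERT_NOT_REACHED() flags any unsupported color space.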
| template <J_COLOR_SPACE colorSpace> bool outputRows(JPEGImageReader* reader, ImageFrame& buffer) |
| @@ -721,6 +787,83 @@ template <J_COLOR_SPACE colorSpace> bool outputRows(JPEGImageReader* reader, Ima |
| return true; |
| } |
| +static bool outputRawData(JPEGImageReader* reader, DecodingBuffers* decodingBuffers) |
| +{ |
| + JSAMPARRAY samples = reader->samples(); |
| + jpeg_decompress_struct* info = reader->info(); |
| + JSAMPARRAY bufferraw[3]; |
| + JSAMPROW bufferraw2[32]; |
| + bufferraw[0] = &bufferraw2[0]; |
|
Alpha Left Google
2014/07/24 22:07:27
Please document why.
|
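In lieu of the documentation requested above (the comment that eventually landed is not shown in this snapshot), the layout follows from how jpeg_read_raw_data() consumes its per-component row-pointer arrays; a commented sketch of the same three assignments:

```cpp
// jpeg_read_raw_data() takes one JSAMPARRAY of row pointers per component and
// fills v_samp_factor * DCTSIZE rows of each per call: at most 2 * 8 = 16 rows
// of Y (the suitability check caps the luma v_samp_factor at 2) and
// 1 * 8 = 8 rows each of U and V. Carving one 32-entry JSAMPROW array at
// offsets 0, 16 and 24 reserves exactly 16 + 8 + 8 row pointers with no extra
// allocation.
bufferraw[0] = &bufferraw2[0];  // Y row pointers: bufferraw2[0..15]
bufferraw[1] = &bufferraw2[16]; // U row pointers: bufferraw2[16..23]
bufferraw[2] = &bufferraw2[24]; // V row pointers: bufferraw2[24..31]
```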
| + bufferraw[1] = &bufferraw2[16]; |
| + bufferraw[2] = &bufferraw2[24]; |
| + int yWidth = info->output_width; |
| + int yHeight = info->output_height; |
| + int yMaxH = yHeight - 1; |
| + int v = info->cur_comp_info[0]->v_samp_factor; |
| + int uvWidth(0), uvHeight(0); |
| + computeUVSize(info, &uvWidth, &uvHeight); |
| + int uvMaxH = uvHeight - 1; |
| + JSAMPROW outputY = static_cast<JSAMPROW>(decodingBuffers->getPlane(0)); |
| + JSAMPROW outputU = static_cast<JSAMPROW>(decodingBuffers->getPlane(1)); |
| + JSAMPROW outputV = static_cast<JSAMPROW>(decodingBuffers->getPlane(2)); |
| + size_t rowBytesY = decodingBuffers->getRowBytes(0); |
| + size_t rowBytesU = decodingBuffers->getRowBytes(1); |
| + size_t rowBytesV = decodingBuffers->getRowBytes(2); |
| + |
| + int scanlinesToRead = DCTSIZE * v; |
|
Alpha Left Google
2014/07/24 22:07:27
nit: This should be yScanlinesToRead.
sugoi1
2014/07/24 23:16:36
Acknowledged.
|
| + JSAMPROW yLastRow = *samples; |
| + JSAMPROW uLastRow = yLastRow + 2 * yWidth; |
| + JSAMPROW vLastRow = uLastRow + 2 * yWidth; |
| + JSAMPROW dummyRow = vLastRow + 2 * yWidth; |
| + |
| + while (info->output_scanline < info->output_height) { |
| + // Request 8 or 16 scanlines: returns 0 or more scanlines. |
| + bool hasYLastRow(false), hasUVLastRow(false); |
| + for (int i = 0; i < scanlinesToRead; ++i) { |
|
Alpha Left Google
2014/07/24 22:07:27
nit: Add a comment that this is reading the Y scanlines.
sugoi1
2014/07/24 23:16:37
Acknowledged.
|
| + int scanline = (info->output_scanline + i); |
| + if (scanline < yMaxH) { |
| + bufferraw2[i] = &outputY[scanline * rowBytesY]; |
| + } else if (scanline == yMaxH) { |
| + bufferraw2[i] = yLastRow; |
| + hasYLastRow = true; |
| + } else { |
| + bufferraw2[i] = dummyRow; |
| + } |
| + } |
| + int scaledScanline = info->output_scanline / v; |
| + for (int i = 0; i < 8; ++i) { |
|
Alpha Left Google
2014/07/24 22:07:27
Add a comment saying this is reading the UV scanlines.
sugoi1
2014/07/24 23:16:36
Acknowledged.
|
| + int scanline = (scaledScanline + i); |
| + if (scanline < uvMaxH) { |
| + bufferraw2[16 + i] = &outputU[scanline * rowBytesU]; |
| + bufferraw2[24 + i] = &outputV[scanline * rowBytesV]; |
| + } else if (scanline == uvMaxH) { |
| + bufferraw2[16 + i] = uLastRow; |
| + bufferraw2[24 + i] = vLastRow; |
| + hasUVLastRow = true; |
| + } else { |
| + bufferraw2[16 + i] = dummyRow; |
| + bufferraw2[24 + i] = dummyRow; |
| + } |
| + } |
| + JDIMENSION scanlinesRead = jpeg_read_raw_data(info, bufferraw, scanlinesToRead); |
| + |
| + if (scanlinesRead == 0) |
| + return false; |
| + |
| + if (hasYLastRow) { |
| + memcpy(&outputY[yMaxH * rowBytesY], yLastRow, yWidth); |
|
Noel Gordon
2014/07/25 15:55:01
memcpy: it's perhaps the best way to kill decoding performance.
sugoi1
2014/07/25 16:56:48
This is only the last row of a jpeg image, if the height isn't a multiple of the sampling block size, so the cost is negligible.
|
| + } |
| + if (hasUVLastRow) { |
| + memcpy(&outputU[uvMaxH * rowBytesU], uLastRow, uvWidth); |
| + memcpy(&outputV[uvMaxH * rowBytesV], vLastRow, uvWidth); |
| + } |
| + } |
| + |
| + info->output_scanline = std::min(info->output_scanline, info->output_height); |
| + |
| + return true; |
| +} |
| + |
| bool JPEGImageDecoder::outputScanlines() |
| { |
| if (m_frameBufferCache.isEmpty()) |
| @@ -728,6 +871,10 @@ bool JPEGImageDecoder::outputScanlines() |
| jpeg_decompress_struct* info = m_reader->info(); |
| + if ((JCS_YCbCr == info->out_color_space) && (m_decodingBuffers.get())) { |
|
Alpha Left Google
2014/07/24 22:07:27
If you have set YUV decoding but didn't assign the decoding buffers, what happens?
sugoi1
2014/07/24 23:16:37
If it fails for any reason, the caller should fall back to regular RGB decoding.
|
| + return outputRawData(m_reader.get(), m_decodingBuffers.get()); |
| + } |
| + |
| // Initialize the framebuffer if needed. |
| ImageFrame& buffer = m_frameBufferCache[0]; |
| if (buffer.status() == ImageFrame::FrameEmpty) { |