| OLD | NEW |
| (Empty) |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "content/common/gpu/media/vaapi_jpeg_decode_accelerator.h" | |
| 6 | |
| 7 #include <stddef.h> | |
| 8 #include <string.h> | |
| 9 #include <utility> | |
| 10 | |
| 11 #include "base/bind.h" | |
| 12 #include "base/logging.h" | |
| 13 #include "base/metrics/histogram.h" | |
| 14 #include "base/thread_task_runner_handle.h" | |
| 15 #include "base/trace_event/trace_event.h" | |
| 16 #include "content/common/gpu/media/shared_memory_region.h" | |
| 17 #include "content/common/gpu/media/vaapi_picture.h" | |
| 18 #include "gpu/ipc/service/gpu_channel.h" | |
| 19 #include "media/base/video_frame.h" | |
| 20 #include "media/filters/jpeg_parser.h" | |
| 21 #include "third_party/libyuv/include/libyuv.h" | |
| 22 | |
| 23 namespace content { | |
| 24 | |
| 25 namespace { | |
// UMA errors that the VaapiJpegDecodeAccelerator class reports.
// Logged to the Media.VAJDA.DecoderFailure histogram; entries must never be
// renumbered or reused once they have shipped.
enum VAJDADecoderFailure {
  // A VA-API call failed (reported via the VaapiWrapper error callback).
  VAAPI_ERROR = 0,
  // UMA requires that max must be greater than 1.
  VAJDA_DECODER_FAILURES_MAX = 2,
};
| 32 | |
// Records |failure| in the Media.VAJDA.DecoderFailure enumeration histogram.
// Kept as a single call site because UMA_HISTOGRAM_ENUMERATION caches a
// static histogram pointer per invocation.
static void ReportToUMA(VAJDADecoderFailure failure) {
  UMA_HISTOGRAM_ENUMERATION("Media.VAJDA.DecoderFailure", failure,
                            VAJDA_DECODER_FAILURES_MAX);
}
| 37 | |
| 38 static unsigned int VaSurfaceFormatForJpeg( | |
| 39 const media::JpegFrameHeader& frame_header) { | |
| 40 // The range of sampling factor is [1, 4]. Pack them into integer to make the | |
| 41 // matching code simpler. For example, 0x211 means the sampling factor are 2, | |
| 42 // 1, 1 for 3 components. | |
| 43 unsigned int h = 0, v = 0; | |
| 44 for (int i = 0; i < frame_header.num_components; i++) { | |
| 45 DCHECK_LE(frame_header.components[i].horizontal_sampling_factor, 4); | |
| 46 DCHECK_LE(frame_header.components[i].vertical_sampling_factor, 4); | |
| 47 h = h << 4 | frame_header.components[i].horizontal_sampling_factor; | |
| 48 v = v << 4 | frame_header.components[i].vertical_sampling_factor; | |
| 49 } | |
| 50 | |
| 51 switch (frame_header.num_components) { | |
| 52 case 1: // Grey image | |
| 53 return VA_RT_FORMAT_YUV400; | |
| 54 | |
| 55 case 3: // Y Cb Cr color image | |
| 56 // See https://en.wikipedia.org/wiki/Chroma_subsampling for the | |
| 57 // definition of these numbers. | |
| 58 if (h == 0x211 && v == 0x211) | |
| 59 return VA_RT_FORMAT_YUV420; | |
| 60 | |
| 61 if (h == 0x211 && v == 0x111) | |
| 62 return VA_RT_FORMAT_YUV422; | |
| 63 | |
| 64 if (h == 0x111 && v == 0x111) | |
| 65 return VA_RT_FORMAT_YUV444; | |
| 66 | |
| 67 if (h == 0x411 && v == 0x111) | |
| 68 return VA_RT_FORMAT_YUV411; | |
| 69 } | |
| 70 DVLOG(1) << "Unsupported sampling factor: num_components=" | |
| 71 << frame_header.num_components << ", h=" << std::hex << h | |
| 72 << ", v=" << v; | |
| 73 | |
| 74 return 0; | |
| 75 } | |
| 76 | |
| 77 } // namespace | |
| 78 | |
// Bundles the inputs of one decode request so ownership of the mapped shared
// memory (|shm|) and a reference to the output |video_frame| can be handed to
// the decoder thread together.
VaapiJpegDecodeAccelerator::DecodeRequest::DecodeRequest(
    int32_t bitstream_buffer_id,
    std::unique_ptr<SharedMemoryRegion> shm,
    const scoped_refptr<media::VideoFrame>& video_frame)
    : bitstream_buffer_id(bitstream_buffer_id),
      shm(std::move(shm)),
      video_frame(video_frame) {}
| 86 | |
| 87 VaapiJpegDecodeAccelerator::DecodeRequest::~DecodeRequest() { | |
| 88 } | |
| 89 | |
| 90 void VaapiJpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id, | |
| 91 Error error) { | |
| 92 DCHECK(task_runner_->BelongsToCurrentThread()); | |
| 93 DLOG(ERROR) << "Notifying of error " << error; | |
| 94 DCHECK(client_); | |
| 95 client_->NotifyError(bitstream_buffer_id, error); | |
| 96 } | |
| 97 | |
| 98 void VaapiJpegDecodeAccelerator::NotifyErrorFromDecoderThread( | |
| 99 int32_t bitstream_buffer_id, | |
| 100 Error error) { | |
| 101 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
| 102 task_runner_->PostTask(FROM_HERE, | |
| 103 base::Bind(&VaapiJpegDecodeAccelerator::NotifyError, | |
| 104 weak_this_, bitstream_buffer_id, error)); | |
| 105 } | |
| 106 | |
// Tells the client that the frame for |bitstream_buffer_id| has been decoded
// and copied into its VideoFrame. Runs on |task_runner_| (posted from
// OutputPicture() on the decoder thread).
void VaapiJpegDecodeAccelerator::VideoFrameReady(int32_t bitstream_buffer_id) {
  DCHECK(task_runner_->BelongsToCurrentThread());
  client_->VideoFrameReady(bitstream_buffer_id);
}
| 111 | |
// Constructed on the ChildThread (captured as |task_runner_|); |io_task_runner|
// is where Decode() will be invoked. The decoder thread is created here but
// not started until Initialize().
VaapiJpegDecodeAccelerator::VaapiJpegDecodeAccelerator(
    const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
    : task_runner_(base::ThreadTaskRunnerHandle::Get()),
      io_task_runner_(io_task_runner),
      decoder_thread_("VaapiJpegDecoderThread"),
      va_surface_id_(VA_INVALID_SURFACE),
      weak_this_factory_(this) {
  // Taken after the factory is fully constructed; dereferenced only on
  // |task_runner_|.
  weak_this_ = weak_this_factory_.GetWeakPtr();
}
| 121 | |
VaapiJpegDecodeAccelerator::~VaapiJpegDecodeAccelerator() {
  DCHECK(task_runner_->BelongsToCurrentThread());
  DVLOG(1) << "Destroying VaapiJpegDecodeAccelerator";

  // Invalidate weak pointers first so tasks already queued on |task_runner_|
  // become no-ops, then drain and stop the decoder thread.
  weak_this_factory_.InvalidateWeakPtrs();
  decoder_thread_.Stop();
}
| 129 | |
| 130 bool VaapiJpegDecodeAccelerator::Initialize(Client* client) { | |
| 131 DCHECK(task_runner_->BelongsToCurrentThread()); | |
| 132 | |
| 133 client_ = client; | |
| 134 | |
| 135 vaapi_wrapper_ = | |
| 136 VaapiWrapper::Create(VaapiWrapper::kDecode, VAProfileJPEGBaseline, | |
| 137 base::Bind(&ReportToUMA, VAAPI_ERROR)); | |
| 138 | |
| 139 if (!vaapi_wrapper_.get()) { | |
| 140 DLOG(ERROR) << "Failed initializing VAAPI"; | |
| 141 return false; | |
| 142 } | |
| 143 | |
| 144 if (!decoder_thread_.Start()) { | |
| 145 DLOG(ERROR) << "Failed to start decoding thread."; | |
| 146 return false; | |
| 147 } | |
| 148 decoder_task_runner_ = decoder_thread_.task_runner(); | |
| 149 | |
| 150 return true; | |
| 151 } | |
| 152 | |
| 153 bool VaapiJpegDecodeAccelerator::OutputPicture( | |
| 154 VASurfaceID va_surface_id, | |
| 155 int32_t input_buffer_id, | |
| 156 const scoped_refptr<media::VideoFrame>& video_frame) { | |
| 157 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | |
| 158 | |
| 159 TRACE_EVENT1("jpeg", "VaapiJpegDecodeAccelerator::OutputPicture", | |
| 160 "input_buffer_id", input_buffer_id); | |
| 161 | |
| 162 DVLOG(3) << "Outputting VASurface " << va_surface_id | |
| 163 << " into video_frame associated with input buffer id " | |
| 164 << input_buffer_id; | |
| 165 | |
| 166 VAImage image; | |
| 167 VAImageFormat format; | |
| 168 const uint32_t kI420Fourcc = VA_FOURCC('I', '4', '2', '0'); | |
| 169 memset(&image, 0, sizeof(image)); | |
| 170 memset(&format, 0, sizeof(format)); | |
| 171 format.fourcc = kI420Fourcc; | |
| 172 format.byte_order = VA_LSB_FIRST; | |
| 173 format.bits_per_pixel = 12; // 12 for I420 | |
| 174 | |
| 175 uint8_t* mem = nullptr; | |
| 176 gfx::Size coded_size = video_frame->coded_size(); | |
| 177 if (!vaapi_wrapper_->GetVaImage(va_surface_id, &format, coded_size, &image, | |
| 178 reinterpret_cast<void**>(&mem))) { | |
| 179 DLOG(ERROR) << "Cannot get VAImage"; | |
| 180 return false; | |
| 181 } | |
| 182 | |
| 183 // Copy image content from VAImage to VideoFrame. | |
| 184 // The component order of VAImage I420 are Y, U, and V. | |
| 185 DCHECK_EQ(image.num_planes, 3u); | |
| 186 DCHECK_GE(image.width, coded_size.width()); | |
| 187 DCHECK_GE(image.height, coded_size.height()); | |
| 188 const uint8_t* src_y = mem + image.offsets[0]; | |
| 189 const uint8_t* src_u = mem + image.offsets[1]; | |
| 190 const uint8_t* src_v = mem + image.offsets[2]; | |
| 191 size_t src_y_stride = image.pitches[0]; | |
| 192 size_t src_u_stride = image.pitches[1]; | |
| 193 size_t src_v_stride = image.pitches[2]; | |
| 194 uint8_t* dst_y = video_frame->data(media::VideoFrame::kYPlane); | |
| 195 uint8_t* dst_u = video_frame->data(media::VideoFrame::kUPlane); | |
| 196 uint8_t* dst_v = video_frame->data(media::VideoFrame::kVPlane); | |
| 197 size_t dst_y_stride = video_frame->stride(media::VideoFrame::kYPlane); | |
| 198 size_t dst_u_stride = video_frame->stride(media::VideoFrame::kUPlane); | |
| 199 size_t dst_v_stride = video_frame->stride(media::VideoFrame::kVPlane); | |
| 200 | |
| 201 if (libyuv::I420Copy(src_y, src_y_stride, // Y | |
| 202 src_u, src_u_stride, // U | |
| 203 src_v, src_v_stride, // V | |
| 204 dst_y, dst_y_stride, // Y | |
| 205 dst_u, dst_u_stride, // U | |
| 206 dst_v, dst_v_stride, // V | |
| 207 coded_size.width(), coded_size.height())) { | |
| 208 DLOG(ERROR) << "I420Copy failed"; | |
| 209 return false; | |
| 210 } | |
| 211 | |
| 212 vaapi_wrapper_->ReturnVaImage(&image); | |
| 213 | |
| 214 task_runner_->PostTask( | |
| 215 FROM_HERE, base::Bind(&VaapiJpegDecodeAccelerator::VideoFrameReady, | |
| 216 weak_this_, input_buffer_id)); | |
| 217 | |
| 218 return true; | |
| 219 } | |
| 220 | |
// Parses and decodes the JPEG in |request| on the decoder thread, then hands
// the decoded surface to OutputPicture(). Every failure path reports back to
// the client via NotifyErrorFromDecoderThread().
void VaapiJpegDecodeAccelerator::DecodeTask(
    const std::unique_ptr<DecodeRequest>& request) {
  DVLOG(3) << __func__;
  DCHECK(decoder_task_runner_->BelongsToCurrentThread());
  TRACE_EVENT0("jpeg", "DecodeTask");

  media::JpegParseResult parse_result;
  if (!media::ParseJpegPicture(
          reinterpret_cast<const uint8_t*>(request->shm->memory()),
          request->shm->size(), &parse_result)) {
    DLOG(ERROR) << "ParseJpegPicture failed";
    NotifyErrorFromDecoderThread(request->bitstream_buffer_id,
                                 PARSE_JPEG_FAILED);
    return;
  }

  // 0 means no VA RT format matches the image's chroma subsampling.
  unsigned int new_va_rt_format =
      VaSurfaceFormatForJpeg(parse_result.frame_header);
  if (!new_va_rt_format) {
    DLOG(ERROR) << "Unsupported subsampling";
    NotifyErrorFromDecoderThread(request->bitstream_buffer_id,
                                 UNSUPPORTED_JPEG);
    return;
  }

  // Reuse VASurface if size doesn't change.
  gfx::Size new_coded_size(parse_result.frame_header.coded_width,
                           parse_result.frame_header.coded_height);
  if (new_coded_size != coded_size_ || va_surface_id_ == VA_INVALID_SURFACE ||
      new_va_rt_format != va_rt_format_) {
    // Destroy the cached surface before allocating a replacement. Resetting
    // |va_surface_id_| first keeps the cache marked invalid if CreateSurfaces
    // fails below, so the next request retries the allocation.
    vaapi_wrapper_->DestroySurfaces();
    va_surface_id_ = VA_INVALID_SURFACE;
    va_rt_format_ = new_va_rt_format;

    std::vector<VASurfaceID> va_surfaces;
    if (!vaapi_wrapper_->CreateSurfaces(va_rt_format_, new_coded_size, 1,
                                        &va_surfaces)) {
      LOG(ERROR) << "Create VA surface failed";
      NotifyErrorFromDecoderThread(request->bitstream_buffer_id,
                                   PLATFORM_FAILURE);
      return;
    }
    va_surface_id_ = va_surfaces[0];
    coded_size_ = new_coded_size;
  }

  if (!VaapiJpegDecoder::Decode(vaapi_wrapper_.get(), parse_result,
                                va_surface_id_)) {
    LOG(ERROR) << "Decode JPEG failed";
    NotifyErrorFromDecoderThread(request->bitstream_buffer_id,
                                 PLATFORM_FAILURE);
    return;
  }

  if (!OutputPicture(va_surface_id_, request->bitstream_buffer_id,
                     request->video_frame)) {
    LOG(ERROR) << "Output picture failed";
    NotifyErrorFromDecoderThread(request->bitstream_buffer_id,
                                 PLATFORM_FAILURE);
    return;
  }
}
| 283 | |
| 284 void VaapiJpegDecodeAccelerator::Decode( | |
| 285 const media::BitstreamBuffer& bitstream_buffer, | |
| 286 const scoped_refptr<media::VideoFrame>& video_frame) { | |
| 287 DVLOG(3) << __func__; | |
| 288 DCHECK(io_task_runner_->BelongsToCurrentThread()); | |
| 289 TRACE_EVENT1("jpeg", "Decode", "input_id", bitstream_buffer.id()); | |
| 290 | |
| 291 DVLOG(4) << "Mapping new input buffer id: " << bitstream_buffer.id() | |
| 292 << " size: " << bitstream_buffer.size(); | |
| 293 | |
| 294 // SharedMemoryRegion will take over the |bitstream_buffer.handle()|. | |
| 295 std::unique_ptr<SharedMemoryRegion> shm( | |
| 296 new SharedMemoryRegion(bitstream_buffer, true)); | |
| 297 | |
| 298 if (bitstream_buffer.id() < 0) { | |
| 299 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); | |
| 300 NotifyErrorFromDecoderThread(bitstream_buffer.id(), INVALID_ARGUMENT); | |
| 301 return; | |
| 302 } | |
| 303 | |
| 304 if (!shm->Map()) { | |
| 305 LOG(ERROR) << "Failed to map input buffer"; | |
| 306 NotifyErrorFromDecoderThread(bitstream_buffer.id(), UNREADABLE_INPUT); | |
| 307 return; | |
| 308 } | |
| 309 | |
| 310 std::unique_ptr<DecodeRequest> request( | |
| 311 new DecodeRequest(bitstream_buffer.id(), std::move(shm), video_frame)); | |
| 312 | |
| 313 decoder_task_runner_->PostTask( | |
| 314 FROM_HERE, base::Bind(&VaapiJpegDecodeAccelerator::DecodeTask, | |
| 315 base::Unretained(this), base::Passed(&request))); | |
| 316 } | |
| 317 | |
// Returns true if the VA-API driver on this machine supports JPEG decoding;
// delegates entirely to VaapiWrapper.
bool VaapiJpegDecodeAccelerator::IsSupported() {
  return VaapiWrapper::IsJpegDecodeSupported();
}
| 321 | |
| 322 } // namespace content | |
| OLD | NEW |