| OLD | NEW |
| (Empty) |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "content/browser/renderer_host/media/video_capture_device_client.h" | |
| 6 | |
| 7 #include <algorithm> | |
| 8 #include <utility> | |
| 9 | |
| 10 #include "base/bind.h" | |
| 11 #include "base/command_line.h" | |
| 12 #include "base/location.h" | |
| 13 #include "base/memory/ptr_util.h" | |
| 14 #include "base/strings/stringprintf.h" | |
| 15 #include "base/trace_event/trace_event.h" | |
| 16 #include "build/build_config.h" | |
| 17 #include "content/browser/renderer_host/media/video_capture_buffer_handle.h" | |
| 18 #include "content/browser/renderer_host/media/video_capture_buffer_pool.h" | |
| 19 #include "content/browser/renderer_host/media/video_capture_controller.h" | |
| 20 #include "content/browser/renderer_host/media/video_capture_gpu_jpeg_decoder.h" | |
| 21 #include "media/base/bind_to_current_loop.h" | |
| 22 #include "media/base/media_switches.h" | |
| 23 #include "media/base/video_capture_types.h" | |
| 24 #include "media/base/video_frame.h" | |
| 25 #include "third_party/libyuv/include/libyuv.h" | |
| 26 | |
| 27 using media::VideoCaptureFormat; | |
| 28 using media::VideoFrame; | |
| 29 using media::VideoFrameMetadata; | |
| 30 | |
| 31 namespace content { | |
| 32 | |
// Class combining a Client::Buffer interface implementation and a pool buffer
// implementation to guarantee proper cleanup on destruction on our side.
// All accessors simply forward to the underlying VideoCaptureBufferHandle;
// the pool reservation is released in the (private, refcount-driven)
// destructor.
class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer {
 public:
  // |pool| must be non-null (DCHECKed); |buffer_id| identifies a buffer
  // previously reserved from |pool| for this producer.
  AutoReleaseBuffer(const scoped_refptr<VideoCaptureBufferPool>& pool,
                    int buffer_id)
      : id_(buffer_id),
        pool_(pool),
        buffer_handle_(pool_->GetBufferHandle(buffer_id)) {
    DCHECK(pool_.get());
  }
  int id() const override { return id_; }
  gfx::Size dimensions() const override { return buffer_handle_->dimensions(); }
  size_t mapped_size() const override { return buffer_handle_->mapped_size(); }
  void* data(int plane) override { return buffer_handle_->data(plane); }
  ClientBuffer AsClientBuffer(int plane) override {
    return buffer_handle_->AsClientBuffer(plane);
  }
#if defined(OS_POSIX) && !defined(OS_MACOSX)
  base::FileDescriptor AsPlatformFile() override {
    return buffer_handle_->AsPlatformFile();
  }
#endif

 private:
  // Returns the buffer to the pool so it can be handed out again.
  ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); }

  const int id_;
  const scoped_refptr<VideoCaptureBufferPool> pool_;
  const std::unique_ptr<VideoCaptureBufferHandle> buffer_handle_;
};
| 64 | |
// The JPEG decoder is not created here; it is lazily built via
// |jpeg_decoder_factory| the first time MJPEG frames arrive (see
// OnIncomingCapturedData). GpuMemoryBuffer output is opted into via a
// command-line switch.
VideoCaptureDeviceClient::VideoCaptureDeviceClient(
    std::unique_ptr<VideoFrameReceiver> receiver,
    const scoped_refptr<VideoCaptureBufferPool>& buffer_pool,
    const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory)
    : receiver_(std::move(receiver)),
      jpeg_decoder_factory_callback_(jpeg_decoder_factory),
      external_jpeg_decoder_initialized_(false),
      buffer_pool_(buffer_pool),
      use_gpu_memory_buffers_(base::CommandLine::ForCurrentProcess()->HasSwitch(
          switches::kUseGpuMemoryBuffersForCapture)),
      last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {}
| 76 | |
VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {
  // This should be on the platform auxiliary thread since
  // |external_jpeg_decoder_| need to be destructed on the same thread as
  // OnIncomingCapturedData.
}
| 82 | |
// Entry point for raw frames delivered by the capture device. Converts the
// incoming pixel data to I420 (rotating/cropping as needed), or hands MJPEG
// straight to the GPU JPEG decoder when available, then forwards the result
// downstream via OnIncomingCapturedBuffer.
void VideoCaptureDeviceClient::OnIncomingCapturedData(
    const uint8_t* data,
    int length,
    const VideoCaptureFormat& frame_format,
    int rotation,
    base::TimeTicks reference_time,
    base::TimeDelta timestamp) {
  TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData");
  DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage);

  // Log pixel-format changes, and lazily create+initialize the external
  // (GPU-accelerated) JPEG decoder the first time MJPEG shows up.
  if (last_captured_pixel_format_ != frame_format.pixel_format) {
    OnLog("Pixel format: " +
          media::VideoPixelFormatToString(frame_format.pixel_format));
    last_captured_pixel_format_ = frame_format.pixel_format;

    if (frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG &&
        !external_jpeg_decoder_initialized_) {
      external_jpeg_decoder_initialized_ = true;
      external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run();
      external_jpeg_decoder_->Initialize();
    }
  }

  if (!frame_format.IsValid())
    return;

  // |chopped_{width,height} and |new_unrotated_{width,height}| are the lowest
  // bit decomposition of {width, height}, grabbing the odd and even parts.
  // I420 needs even dimensions, so odd source edges are chopped by one pixel.
  const int chopped_width = frame_format.frame_size.width() & 1;
  const int chopped_height = frame_format.frame_size.height() & 1;
  const int new_unrotated_width = frame_format.frame_size.width() & ~1;
  const int new_unrotated_height = frame_format.frame_size.height() & ~1;

  // 90/270 degree rotations swap the output's width and height.
  int destination_width = new_unrotated_width;
  int destination_height = new_unrotated_height;
  if (rotation == 90 || rotation == 270)
    std::swap(destination_width, destination_height);

  DCHECK_EQ(0, rotation % 90)
      << " Rotation must be a multiple of 90, now: " << rotation;
  libyuv::RotationMode rotation_mode = libyuv::kRotate0;
  if (rotation == 90)
    rotation_mode = libyuv::kRotate90;
  else if (rotation == 180)
    rotation_mode = libyuv::kRotate180;
  else if (rotation == 270)
    rotation_mode = libyuv::kRotate270;

  const gfx::Size dimensions(destination_width, destination_height);
  const media::VideoPixelStorage output_pixel_storage =
      use_gpu_memory_buffers_ ? media::PIXEL_STORAGE_GPUMEMORYBUFFER
                              : media::PIXEL_STORAGE_CPU;
  // Reserve the I420 destination buffer up front; the plane pointers are
  // filled in by ReserveI420OutputBuffer.
  uint8_t *y_plane_data, *u_plane_data, *v_plane_data;
  std::unique_ptr<Buffer> buffer(
      ReserveI420OutputBuffer(dimensions, output_pixel_storage, &y_plane_data,
                              &u_plane_data, &v_plane_data));
#if DCHECK_IS_ON()
  // Track consecutive reservation failures; a long streak signals an error.
  dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1;
  if (dropped_frame_counter_ >= kMaxDroppedFrames)
    OnError(FROM_HERE, "Too many frames dropped");
#endif
  // Failed to reserve I420 output buffer, so drop the frame.
  if (!buffer.get())
    return;

  const int yplane_stride = dimensions.width();
  const int uv_plane_stride = yplane_stride / 2;
  int crop_x = 0;
  int crop_y = 0;
  libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY;

  // |flip| requests vertical flipping (expressed to libyuv as a negative
  // source height below).
  bool flip = false;
  switch (frame_format.pixel_format) {
    case media::PIXEL_FORMAT_UNKNOWN:  // Color format not set.
      break;
    case media::PIXEL_FORMAT_I420:
      DCHECK(!chopped_width && !chopped_height);
      origin_colorspace = libyuv::FOURCC_I420;
      break;
    case media::PIXEL_FORMAT_YV12:
      DCHECK(!chopped_width && !chopped_height);
      origin_colorspace = libyuv::FOURCC_YV12;
      break;
    case media::PIXEL_FORMAT_NV12:
      DCHECK(!chopped_width && !chopped_height);
      origin_colorspace = libyuv::FOURCC_NV12;
      break;
    case media::PIXEL_FORMAT_NV21:
      DCHECK(!chopped_width && !chopped_height);
      origin_colorspace = libyuv::FOURCC_NV21;
      break;
    case media::PIXEL_FORMAT_YUY2:
      DCHECK(!chopped_width && !chopped_height);
      origin_colorspace = libyuv::FOURCC_YUY2;
      break;
    case media::PIXEL_FORMAT_UYVY:
      DCHECK(!chopped_width && !chopped_height);
      origin_colorspace = libyuv::FOURCC_UYVY;
      break;
    case media::PIXEL_FORMAT_RGB24:
      // Linux RGB24 defines red at lowest byte address,
      // see http://linuxtv.org/downloads/v4l-dvb-apis/packed-rgb.html.
      // Windows RGB24 defines blue at lowest byte,
      // see https://msdn.microsoft.com/en-us/library/windows/desktop/dd407253
#if defined(OS_LINUX)
      origin_colorspace = libyuv::FOURCC_RAW;
#elif defined(OS_WIN)
      origin_colorspace = libyuv::FOURCC_24BG;
#else
      NOTREACHED() << "RGB24 is only available in Linux and Windows platforms";
#endif
#if defined(OS_WIN)
      // TODO(wjia): Currently, for RGB24 on WIN, capture device always passes
      // in positive src_width and src_height. Remove this hardcoded value when
      // negative src_height is supported. The negative src_height indicates
      // that vertical flipping is needed.
      flip = true;
#endif
      break;
    case media::PIXEL_FORMAT_RGB32:
      // Fallback to PIXEL_FORMAT_ARGB setting |flip| in Windows
      // platforms.
#if defined(OS_WIN)
      flip = true;
#endif
      // Intentional fall through to the ARGB case (no break).
    case media::PIXEL_FORMAT_ARGB:
      origin_colorspace = libyuv::FOURCC_ARGB;
      break;
    case media::PIXEL_FORMAT_MJPEG:
      origin_colorspace = libyuv::FOURCC_MJPG;
      break;
    default:
      NOTREACHED();
  }

  // The input |length| can be greater than the required buffer size because of
  // paddings and/or alignments, but it cannot be smaller.
  DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize());

  // Prefer GPU JPEG decoding for unrotated, unflipped MJPEG frames; the
  // decoder takes ownership of |buffer| and delivers the frame itself.
  if (external_jpeg_decoder_) {
    const VideoCaptureGpuJpegDecoder::STATUS status =
        external_jpeg_decoder_->GetStatus();
    if (status == VideoCaptureGpuJpegDecoder::FAILED) {
      external_jpeg_decoder_.reset();
    } else if (status == VideoCaptureGpuJpegDecoder::INIT_PASSED &&
               frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG &&
               rotation == 0 && !flip) {
      external_jpeg_decoder_->DecodeCapturedData(data, length, frame_format,
                                                 reference_time, timestamp,
                                                 std::move(buffer));
      return;
    }
  }

  // Software path: convert (and rotate/flip/crop) into the reserved I420
  // planes. A negative source height asks libyuv to flip vertically.
  if (libyuv::ConvertToI420(data,
                            length,
                            y_plane_data,
                            yplane_stride,
                            u_plane_data,
                            uv_plane_stride,
                            v_plane_data,
                            uv_plane_stride,
                            crop_x,
                            crop_y,
                            frame_format.frame_size.width(),
                            (flip ? -1 : 1) * frame_format.frame_size.height(),
                            new_unrotated_width,
                            new_unrotated_height,
                            rotation_mode,
                            origin_colorspace) != 0) {
    DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from "
                  << media::VideoPixelFormatToString(frame_format.pixel_format);
    return;
  }

  const VideoCaptureFormat output_format = VideoCaptureFormat(
      dimensions, frame_format.frame_rate,
      media::PIXEL_FORMAT_I420, output_pixel_storage);
  OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time,
                           timestamp);
}
| 264 | |
| 265 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | |
| 266 VideoCaptureDeviceClient::ReserveOutputBuffer( | |
| 267 const gfx::Size& frame_size, | |
| 268 media::VideoPixelFormat pixel_format, | |
| 269 media::VideoPixelStorage pixel_storage) { | |
| 270 DCHECK_GT(frame_size.width(), 0); | |
| 271 DCHECK_GT(frame_size.height(), 0); | |
| 272 // Currently, only I420 pixel format is supported. | |
| 273 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); | |
| 274 | |
| 275 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if | |
| 276 // it's a ShMem GMB or a DmaBuf GMB. | |
| 277 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | |
| 278 const int buffer_id = buffer_pool_->ReserveForProducer( | |
| 279 frame_size, pixel_format, pixel_storage, &buffer_id_to_drop); | |
| 280 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) | |
| 281 receiver_->OnBufferDestroyed(buffer_id_to_drop); | |
| 282 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | |
| 283 return nullptr; | |
| 284 return base::WrapUnique<Buffer>( | |
| 285 new AutoReleaseBuffer(buffer_pool_, buffer_id)); | |
| 286 } | |
| 287 | |
// Wraps a filled I420 pool buffer in a media::VideoFrame (without copying the
// pixel data), attaches frame-rate and reference-time metadata, and forwards
// it together with the owning buffer to the receiver.
void VideoCaptureDeviceClient::OnIncomingCapturedBuffer(
    std::unique_ptr<Buffer> buffer,
    const VideoCaptureFormat& frame_format,
    base::TimeTicks reference_time,
    base::TimeDelta timestamp) {
  // Currently, only I420 pixel format is supported.
  DCHECK_EQ(media::PIXEL_FORMAT_I420, frame_format.pixel_format);

  scoped_refptr<VideoFrame> frame;
  switch (frame_format.pixel_storage) {
    case media::PIXEL_STORAGE_GPUMEMORYBUFFER: {
      // Create a VideoFrame to set the correct storage_type and pixel_format.
      // |handle| is default-constructed (empty) here and passed for all three
      // planes; presumably the real GMB handles are attached downstream —
      // NOTE(review): confirm against the receiver's expectations.
      gfx::GpuMemoryBufferHandle handle;
      frame = VideoFrame::WrapExternalYuvGpuMemoryBuffers(
          media::PIXEL_FORMAT_I420, frame_format.frame_size,
          gfx::Rect(frame_format.frame_size), frame_format.frame_size, 0, 0, 0,
          reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kYPlane)),
          reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kUPlane)),
          reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kVPlane)),
          handle, handle, handle, timestamp);
      break;
    }
    case media::PIXEL_STORAGE_CPU:
      // CPU storage: the three planes live contiguously in one shared-memory
      // block, so wrap it as a single allocation.
      frame = VideoFrame::WrapExternalSharedMemory(
          media::PIXEL_FORMAT_I420, frame_format.frame_size,
          gfx::Rect(frame_format.frame_size), frame_format.frame_size,
          reinterpret_cast<uint8_t*>(buffer->data()),
          VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420,
                                     frame_format.frame_size),
          base::SharedMemory::NULLHandle(), 0u, timestamp);
      break;
  }
  // Wrapping can fail (e.g. invalid dimensions); drop the frame silently.
  if (!frame)
    return;
  frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
                               frame_format.frame_rate);
  frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME,
                                  reference_time);
  OnIncomingCapturedVideoFrame(std::move(buffer), frame);
}
| 328 | |
// Forwards a ready frame (and the buffer that owns its pixel data) to the
// receiver, transferring buffer ownership.
void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame(
    std::unique_ptr<Buffer> buffer,
    const scoped_refptr<VideoFrame>& frame) {
  receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), frame);
}
| 334 | |
| 335 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | |
| 336 VideoCaptureDeviceClient::ResurrectLastOutputBuffer( | |
| 337 const gfx::Size& dimensions, | |
| 338 media::VideoPixelFormat format, | |
| 339 media::VideoPixelStorage storage) { | |
| 340 const int buffer_id = | |
| 341 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); | |
| 342 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | |
| 343 return nullptr; | |
| 344 return base::WrapUnique<Buffer>( | |
| 345 new AutoReleaseBuffer(buffer_pool_, buffer_id)); | |
| 346 } | |
| 347 | |
| 348 void VideoCaptureDeviceClient::OnError( | |
| 349 const tracked_objects::Location& from_here, | |
| 350 const std::string& reason) { | |
| 351 const std::string log_message = base::StringPrintf( | |
| 352 "error@ %s, %s, OS message: %s", from_here.ToString().c_str(), | |
| 353 reason.c_str(), | |
| 354 logging::SystemErrorCodeToString(logging::GetLastSystemErrorCode()) | |
| 355 .c_str()); | |
| 356 DLOG(ERROR) << log_message; | |
| 357 OnLog(log_message); | |
| 358 receiver_->OnError(); | |
| 359 } | |
| 360 | |
// Forwards a human-readable log message to the receiver.
void VideoCaptureDeviceClient::OnLog(
    const std::string& message) {
  receiver_->OnLog(message);
}
| 365 | |
// Returns the buffer pool's current utilization as reported by the pool.
double VideoCaptureDeviceClient::GetBufferPoolUtilization() const {
  return buffer_pool_->GetBufferPoolUtilization();
}
| 369 | |
| 370 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | |
| 371 VideoCaptureDeviceClient::ReserveI420OutputBuffer( | |
| 372 const gfx::Size& dimensions, | |
| 373 media::VideoPixelStorage storage, | |
| 374 uint8_t** y_plane_data, | |
| 375 uint8_t** u_plane_data, | |
| 376 uint8_t** v_plane_data) { | |
| 377 DCHECK(storage == media::PIXEL_STORAGE_GPUMEMORYBUFFER || | |
| 378 storage == media::PIXEL_STORAGE_CPU); | |
| 379 DCHECK(dimensions.height()); | |
| 380 DCHECK(dimensions.width()); | |
| 381 | |
| 382 const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; | |
| 383 std::unique_ptr<Buffer> buffer( | |
| 384 ReserveOutputBuffer(dimensions, media::PIXEL_FORMAT_I420, storage)); | |
| 385 if (!buffer) | |
| 386 return std::unique_ptr<Buffer>(); | |
| 387 | |
| 388 switch (storage) { | |
| 389 case media::PIXEL_STORAGE_CPU: | |
| 390 // TODO(emircan): See http://crbug.com/521068, move this pointer | |
| 391 // arithmetic inside Buffer::data() when this bug is resolved. | |
| 392 *y_plane_data = reinterpret_cast<uint8_t*>(buffer->data()); | |
| 393 *u_plane_data = | |
| 394 *y_plane_data + | |
| 395 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions) | |
| 396 .GetArea(); | |
| 397 *v_plane_data = | |
| 398 *u_plane_data + | |
| 399 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions) | |
| 400 .GetArea(); | |
| 401 return buffer; | |
| 402 case media::PIXEL_STORAGE_GPUMEMORYBUFFER: | |
| 403 *y_plane_data = | |
| 404 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kYPlane)); | |
| 405 *u_plane_data = | |
| 406 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kUPlane)); | |
| 407 *v_plane_data = | |
| 408 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kVPlane)); | |
| 409 return buffer; | |
| 410 } | |
| 411 NOTREACHED(); | |
| 412 return std::unique_ptr<Buffer>(); | |
| 413 } | |
| 414 | |
| 415 } // namespace content | |
| OLD | NEW |