OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/renderer_host/media/video_capture_device_client.h" | 5 #include "content/browser/renderer_host/media/video_capture_device_client.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/strings/stringprintf.h" | 8 #include "base/strings/stringprintf.h" |
9 #include "base/trace_event/trace_event.h" | 9 #include "base/trace_event/trace_event.h" |
10 #include "content/browser/compositor/image_transport_factory.h" | 10 #include "content/browser/compositor/image_transport_factory.h" |
(...skipping 196 matching lines...)
207 | 207 |
208 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {} | 208 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {} |
209 | 209 |
210 void VideoCaptureDeviceClient::OnIncomingCapturedData( | 210 void VideoCaptureDeviceClient::OnIncomingCapturedData( |
211 const uint8* data, | 211 const uint8* data, |
212 int length, | 212 int length, |
213 const VideoCaptureFormat& frame_format, | 213 const VideoCaptureFormat& frame_format, |
214 int rotation, | 214 int rotation, |
215 const base::TimeTicks& timestamp) { | 215 const base::TimeTicks& timestamp) { |
216 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); | 216 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); |
| 217 DCHECK_EQ(frame_format.pixel_storage, media::PIXEL_STORAGE_CPU); |
217 | 218 |
218 if (last_captured_pixel_format_ != frame_format.pixel_format) { | 219 if (last_captured_pixel_format_ != frame_format.pixel_format) { |
219 OnLog("Pixel format: " + media::VideoCaptureFormat::PixelFormatToString( | 220 OnLog("Pixel format: " + |
220 frame_format.pixel_format)); | 221 VideoCaptureFormat::PixelFormatToString(frame_format.pixel_format)); |
221 last_captured_pixel_format_ = frame_format.pixel_format; | 222 last_captured_pixel_format_ = frame_format.pixel_format; |
222 } | 223 } |
223 | 224 |
224 if (!frame_format.IsValid()) | 225 if (!frame_format.IsValid()) |
225 return; | 226 return; |
226 | 227 |
227 // |chopped_{width,height} and |new_unrotated_{width,height}| are the lowest | 228 // |chopped_{width,height} and |new_unrotated_{width,height}| are the lowest |
228 // bit decomposition of {width, height}, grabbing the odd and even parts. | 229 // bit decomposition of {width, height}, grabbing the odd and even parts. |
229 const int chopped_width = frame_format.frame_size.width() & 1; | 230 const int chopped_width = frame_format.frame_size.width() & 1; |
230 const int chopped_height = frame_format.frame_size.height() & 1; | 231 const int chopped_height = frame_format.frame_size.height() & 1; |
(...skipping 19 matching lines...)
250 | 251 |
251 const gfx::Size dimensions(destination_width, destination_height); | 252 const gfx::Size dimensions(destination_width, destination_height); |
252 if (!VideoFrame::IsValidConfig(VideoFrame::I420, | 253 if (!VideoFrame::IsValidConfig(VideoFrame::I420, |
253 VideoFrame::STORAGE_UNKNOWN, | 254 VideoFrame::STORAGE_UNKNOWN, |
254 dimensions, | 255 dimensions, |
255 gfx::Rect(dimensions), | 256 gfx::Rect(dimensions), |
256 dimensions)) { | 257 dimensions)) { |
257 return; | 258 return; |
258 } | 259 } |
259 | 260 |
260 scoped_ptr<Buffer> buffer( | 261 scoped_ptr<Buffer> buffer(ReserveOutputBuffer( |
261 ReserveOutputBuffer(media::PIXEL_FORMAT_I420, dimensions)); | 262 dimensions, media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU)); |
262 if (!buffer.get()) | 263 if (!buffer.get()) |
263 return; | 264 return; |
264 | 265 |
265 uint8* const yplane = reinterpret_cast<uint8*>(buffer->data()); | 266 uint8* const yplane = reinterpret_cast<uint8*>(buffer->data()); |
266 uint8* const uplane = | 267 uint8* const uplane = |
267 yplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420, | 268 yplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420, |
268 VideoFrame::kYPlane, dimensions); | 269 VideoFrame::kYPlane, dimensions); |
269 uint8* const vplane = | 270 uint8* const vplane = |
270 uplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420, | 271 uplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420, |
271 VideoFrame::kUPlane, dimensions); | 272 VideoFrame::kUPlane, dimensions); |
(...skipping 70 matching lines...)
342 uv_plane_stride, | 343 uv_plane_stride, |
343 crop_x, | 344 crop_x, |
344 crop_y, | 345 crop_y, |
345 frame_format.frame_size.width(), | 346 frame_format.frame_size.width(), |
346 (flip ? -1 : 1) * frame_format.frame_size.height(), | 347 (flip ? -1 : 1) * frame_format.frame_size.height(), |
347 new_unrotated_width, | 348 new_unrotated_width, |
348 new_unrotated_height, | 349 new_unrotated_height, |
349 rotation_mode, | 350 rotation_mode, |
350 origin_colorspace) != 0) { | 351 origin_colorspace) != 0) { |
351 DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from " | 352 DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from " |
352 << media::VideoCaptureFormat::PixelFormatToString( | 353 << VideoCaptureFormat::PixelFormatToString( |
353 frame_format.pixel_format); | 354 frame_format.pixel_format); |
354 return; | 355 return; |
355 } | 356 } |
356 | 357 |
357 OnIncomingCapturedBuffer(buffer.Pass(), | 358 const VideoCaptureFormat output_format = |
358 media::VideoCaptureFormat(dimensions, | 359 VideoCaptureFormat(dimensions, frame_format.frame_rate, |
359 frame_format.frame_rate, | 360 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); |
360 media::PIXEL_FORMAT_I420), | 361 OnIncomingCapturedBuffer(buffer.Pass(), output_format, timestamp); |
361 timestamp); | |
362 } | 362 } |
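Reviewer note: the |chopped_{width,height}| comment at old line 227 is terse, and the hunk that consumes those values is elided here. A minimal sketch of the intent, assuming the elided lines only need the even part of each dimension for the I420 destination buffer; the even_* names are illustrative, not the locals used in the elided code:

    // Split each dimension into its even part (I420 requires even
    // width/height) and its odd remainder.
    const int width = frame_format.frame_size.width();
    const int height = frame_format.frame_size.height();
    const int chopped_width = width & 1;    // 1 if width is odd, else 0.
    const int chopped_height = height & 1;  // 1 if height is odd, else 0.
    const int even_width = width & ~1;      // width - chopped_width
    const int even_height = height & ~1;    // height - chopped_height
    // The even parts (swapped for 90/270 degree rotations) become the
    // |dimensions| validated with IsValidConfig(); the odd remainders are
    // dropped during conversion.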
363 | 363 |
364 void | 364 void |
365 VideoCaptureDeviceClient::OnIncomingCapturedYuvData( | 365 VideoCaptureDeviceClient::OnIncomingCapturedYuvData( |
366 const uint8* y_data, | 366 const uint8* y_data, |
367 const uint8* u_data, | 367 const uint8* u_data, |
368 const uint8* v_data, | 368 const uint8* v_data, |
369 size_t y_stride, | 369 size_t y_stride, |
370 size_t u_stride, | 370 size_t u_stride, |
371 size_t v_stride, | 371 size_t v_stride, |
372 const VideoCaptureFormat& frame_format, | 372 const VideoCaptureFormat& frame_format, |
373 int clockwise_rotation, | 373 int clockwise_rotation, |
374 const base::TimeTicks& timestamp) { | 374 const base::TimeTicks& timestamp) { |
375 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedYuvData"); | 375 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedYuvData"); |
376 DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_I420); | 376 DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_I420); |
| 377 DCHECK_EQ(frame_format.pixel_storage, media::PIXEL_STORAGE_CPU); |
377 DCHECK_EQ(clockwise_rotation, 0) << "Rotation not supported"; | 378 DCHECK_EQ(clockwise_rotation, 0) << "Rotation not supported"; |
378 | 379 |
379 scoped_ptr<Buffer> buffer( | 380 scoped_ptr<Buffer> buffer(ReserveOutputBuffer(frame_format.frame_size, |
380 ReserveOutputBuffer(frame_format.pixel_format, frame_format.frame_size)); | 381 frame_format.pixel_format, |
| 382 frame_format.pixel_storage)); |
381 if (!buffer.get()) | 383 if (!buffer.get()) |
382 return; | 384 return; |
383 | 385 |
384 // Blit (copy) here from y,u,v into buffer.data()). Needed so we can return | 386 // Blit (copy) here from y,u,v into buffer.data()). Needed so we can return |
385 // the parameter buffer synchronously to the driver. | 387 // the parameter buffer synchronously to the driver. |
386 const size_t y_plane_size = VideoFrame::PlaneAllocationSize(VideoFrame::I420, | 388 const size_t y_plane_size = VideoFrame::PlaneAllocationSize(VideoFrame::I420, |
387 VideoFrame::kYPlane, frame_format.frame_size); | 389 VideoFrame::kYPlane, frame_format.frame_size); |
388 const size_t u_plane_size = VideoFrame::PlaneAllocationSize( | 390 const size_t u_plane_size = VideoFrame::PlaneAllocationSize( |
389 VideoFrame::I420, VideoFrame::kUPlane, frame_format.frame_size); | 391 VideoFrame::I420, VideoFrame::kUPlane, frame_format.frame_size); |
390 uint8* const dst_y = reinterpret_cast<uint8*>(buffer->data()); | 392 uint8* const dst_y = reinterpret_cast<uint8*>(buffer->data()); |
(...skipping 19 matching lines...)
410 frame_format.frame_size.width(), | 412 frame_format.frame_size.width(), |
411 frame_format.frame_size.height())) { | 413 frame_format.frame_size.height())) { |
412 DLOG(WARNING) << "Failed to copy buffer"; | 414 DLOG(WARNING) << "Failed to copy buffer"; |
413 return; | 415 return; |
414 } | 416 } |
415 | 417 |
416 OnIncomingCapturedBuffer(buffer.Pass(), frame_format, timestamp); | 418 OnIncomingCapturedBuffer(buffer.Pass(), frame_format, timestamp); |
417 }; | 419 }; |
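Reviewer note: the plane copy referenced by the "Blit (copy) here" comment falls inside the elided hunk (old lines ~391-409). A sketch of what that copy amounts to, assuming it uses libyuv::I420Copy (signature from libyuv); dst_y/dst_u and the source pointers/strides are the locals visible above, while dst_v and the destination strides stand in for values computed in the elided lines:

    // Copy the three source planes into the contiguous I420 layout of
    // |buffer| so the driver-owned y/u/v memory can be returned right away.
    // I420Copy() returns 0 on success.
    if (libyuv::I420Copy(y_data, static_cast<int>(y_stride),
                         u_data, static_cast<int>(u_stride),
                         v_data, static_cast<int>(v_stride),
                         dst_y, dst_y_stride,
                         dst_u, dst_u_stride,
                         dst_v, dst_v_stride,
                         frame_format.frame_size.width(),
                         frame_format.frame_size.height()) != 0) {
      DLOG(WARNING) << "Failed to copy buffer";
      return;
    }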
418 | 420 |
419 scoped_ptr<media::VideoCaptureDevice::Client::Buffer> | 421 scoped_ptr<media::VideoCaptureDevice::Client::Buffer> |
420 VideoCaptureDeviceClient::ReserveOutputBuffer(media::VideoPixelFormat format, | 422 VideoCaptureDeviceClient::ReserveOutputBuffer( |
421 const gfx::Size& dimensions) { | 423 const gfx::Size& frame_size, |
422 DCHECK(format == media::PIXEL_FORMAT_I420 || | 424 media::VideoPixelFormat pixel_format, |
423 format == media::PIXEL_FORMAT_TEXTURE || | 425 media::VideoPixelStorage pixel_storage) { |
424 format == media::PIXEL_FORMAT_GPUMEMORYBUFFER); | 426 DCHECK(pixel_format == media::PIXEL_FORMAT_I420 || |
425 DCHECK_GT(dimensions.width(), 0); | 427 pixel_format == media::PIXEL_FORMAT_ARGB); |
426 DCHECK_GT(dimensions.height(), 0); | 428 DCHECK_GT(frame_size.width(), 0); |
| 429 DCHECK_GT(frame_size.height(), 0); |
427 | 430 |
428 if (format == media::PIXEL_FORMAT_GPUMEMORYBUFFER && !texture_wrap_helper_) { | 431 if (pixel_storage == media::PIXEL_STORAGE_GPUMEMORYBUFFER && |
| 432 !texture_wrap_helper_) { |
429 texture_wrap_helper_ = | 433 texture_wrap_helper_ = |
430 new TextureWrapHelper(controller_, capture_task_runner_); | 434 new TextureWrapHelper(controller_, capture_task_runner_); |
431 } | 435 } |
432 | 436 |
| 437 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if |
| 438 // it's a ShMem GMB or a DmaBuf GMB. |
433 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | 439 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
434 const int buffer_id = | 440 const int buffer_id = buffer_pool_->ReserveForProducer( |
435 buffer_pool_->ReserveForProducer(format, dimensions, &buffer_id_to_drop); | 441 pixel_format, pixel_storage, frame_size, &buffer_id_to_drop); |
436 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 442 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
437 return NULL; | 443 return NULL; |
438 | 444 |
439 scoped_ptr<media::VideoCaptureDevice::Client::Buffer> output_buffer( | 445 scoped_ptr<media::VideoCaptureDevice::Client::Buffer> output_buffer( |
440 new AutoReleaseBuffer(buffer_pool_, buffer_id)); | 446 new AutoReleaseBuffer(buffer_pool_, buffer_id)); |
441 | 447 |
442 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { | 448 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { |
443 BrowserThread::PostTask(BrowserThread::IO, | 449 BrowserThread::PostTask(BrowserThread::IO, |
444 FROM_HERE, | 450 FROM_HERE, |
445 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, | 451 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, |
446 controller_, buffer_id_to_drop)); | 452 controller_, buffer_id_to_drop)); |
447 } | 453 } |
448 | 454 |
449 return output_buffer.Pass(); | 455 return output_buffer.Pass(); |
450 } | 456 } |
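Reviewer note: the new signature means every VideoCaptureDevice implementation now has to state the storage type explicitly alongside format and size. A minimal, hypothetical call site assuming a device that produces CPU-backed I420 frames (|client_| and the dimensions are illustrative):

    // Reserve a pooled buffer for one 640x480 I420 frame in system memory.
    const gfx::Size dimensions(640, 480);
    scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer(
        client_->ReserveOutputBuffer(dimensions,
                                     media::PIXEL_FORMAT_I420,
                                     media::PIXEL_STORAGE_CPU));
    if (!buffer.get())
      return;  // Pool exhausted: drop this frame.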
451 | 457 |
452 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( | 458 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
453 scoped_ptr<Buffer> buffer, | 459 scoped_ptr<Buffer> buffer, |
454 const media::VideoCaptureFormat& frame_format, | 460 const VideoCaptureFormat& frame_format, |
455 const base::TimeTicks& timestamp) { | 461 const base::TimeTicks& timestamp) { |
456 if (frame_format.pixel_format == media::PIXEL_FORMAT_GPUMEMORYBUFFER) { | 462 if (frame_format.pixel_storage == media::PIXEL_STORAGE_GPUMEMORYBUFFER) { |
457 capture_task_runner_->PostTask( | 463 capture_task_runner_->PostTask( |
458 FROM_HERE, | 464 FROM_HERE, |
459 base::Bind(&TextureWrapHelper::OnIncomingCapturedGpuMemoryBuffer, | 465 base::Bind(&TextureWrapHelper::OnIncomingCapturedGpuMemoryBuffer, |
460 texture_wrap_helper_, | 466 texture_wrap_helper_, |
461 base::Passed(&buffer), | 467 base::Passed(&buffer), |
462 frame_format, | 468 frame_format, |
463 timestamp)); | 469 timestamp)); |
464 } else { | 470 } else { |
465 DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_I420); | 471 DCHECK(frame_format.pixel_format == media::PIXEL_FORMAT_I420 || |
| 472 frame_format.pixel_format == media::PIXEL_FORMAT_ARGB); |
466 scoped_refptr<VideoFrame> video_frame = | 473 scoped_refptr<VideoFrame> video_frame = |
467 VideoFrame::WrapExternalData( | 474 VideoFrame::WrapExternalData( |
468 VideoFrame::I420, | 475 VideoFrame::I420, |
469 frame_format.frame_size, | 476 frame_format.frame_size, |
470 gfx::Rect(frame_format.frame_size), | 477 gfx::Rect(frame_format.frame_size), |
471 frame_format.frame_size, | 478 frame_format.frame_size, |
472 reinterpret_cast<uint8*>(buffer->data()), | 479 reinterpret_cast<uint8*>(buffer->data()), |
473 VideoFrame::AllocationSize(VideoFrame::I420, | 480 VideoFrame::AllocationSize(VideoFrame::I420, |
474 frame_format.frame_size), | 481 frame_format.frame_size), |
475 base::TimeDelta()); | 482 base::TimeDelta()); |
(...skipping 53 matching lines...)
529 capture_task_runner_->PostTask(FROM_HERE, | 536 capture_task_runner_->PostTask(FROM_HERE, |
530 base::Bind(&TextureWrapHelper::Init, this)); | 537 base::Bind(&TextureWrapHelper::Init, this)); |
531 } | 538 } |
532 | 539 |
533 void | 540 void |
534 VideoCaptureDeviceClient::TextureWrapHelper::OnIncomingCapturedGpuMemoryBuffer( | 541 VideoCaptureDeviceClient::TextureWrapHelper::OnIncomingCapturedGpuMemoryBuffer( |
535 scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer, | 542 scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer, |
536 const media::VideoCaptureFormat& frame_format, | 543 const media::VideoCaptureFormat& frame_format, |
537 const base::TimeTicks& timestamp) { | 544 const base::TimeTicks& timestamp) { |
538 DCHECK(capture_task_runner_->BelongsToCurrentThread()); | 545 DCHECK(capture_task_runner_->BelongsToCurrentThread()); |
539 DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_GPUMEMORYBUFFER); | 546 DCHECK_EQ(frame_format.pixel_storage, media::PIXEL_STORAGE_GPUMEMORYBUFFER); |
540 if (!gl_helper_) { | 547 if (!gl_helper_) { |
541 // |gl_helper_| might not exist due to asynchronous initialization not | 548 // |gl_helper_| might not exist due to asynchronous initialization not |
542 // finished or due to termination in process after a context loss. | 549 // finished or due to termination in process after a context loss. |
543 DVLOG(1) << " Skipping ingress frame, no GL context."; | 550 DVLOG(1) << " Skipping ingress frame, no GL context."; |
544 return; | 551 return; |
545 } | 552 } |
546 | 553 |
547 gpu::gles2::GLES2Interface* gl = capture_thread_context_->ContextGL(); | 554 gpu::gles2::GLES2Interface* gl = capture_thread_context_->ContextGL(); |
548 GLuint image_id = gl->CreateImageCHROMIUM(buffer->AsClientBuffer(), | 555 GLuint image_id = gl->CreateImageCHROMIUM(buffer->AsClientBuffer(), |
549 frame_format.frame_size.width(), | 556 frame_format.frame_size.width(), |
(...skipping 134 matching lines...)
684 void VideoCaptureDeviceClient::TextureWrapHelper::OnError( | 691 void VideoCaptureDeviceClient::TextureWrapHelper::OnError( |
685 const std::string& message) { | 692 const std::string& message) { |
686 DCHECK(capture_task_runner_->BelongsToCurrentThread()); | 693 DCHECK(capture_task_runner_->BelongsToCurrentThread()); |
687 DLOG(ERROR) << message; | 694 DLOG(ERROR) << message; |
688 BrowserThread::PostTask( | 695 BrowserThread::PostTask( |
689 BrowserThread::IO, FROM_HERE, | 696 BrowserThread::IO, FROM_HERE, |
690 base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_)); | 697 base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_)); |
691 } | 698 } |
692 | 699 |
693 } // namespace content | 700 } // namespace content |