OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/media/capture/web_contents_video_capture_device.h" | 5 #include "content/browser/media/capture/web_contents_video_capture_device.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 #include <stdint.h> | 8 #include <stdint.h> |
9 | 9 |
10 #include <utility> | 10 #include <utility> |
(...skipping 19 matching lines...) |
30 #include "content/public/test/test_browser_thread_bundle.h" | 30 #include "content/public/test/test_browser_thread_bundle.h" |
31 #include "content/public/test/test_utils.h" | 31 #include "content/public/test/test_utils.h" |
32 #include "content/test/test_render_frame_host_factory.h" | 32 #include "content/test/test_render_frame_host_factory.h" |
33 #include "content/test/test_render_view_host.h" | 33 #include "content/test/test_render_view_host.h" |
34 #include "content/test/test_web_contents.h" | 34 #include "content/test/test_web_contents.h" |
35 #include "media/base/video_frame.h" | 35 #include "media/base/video_frame.h" |
36 #include "media/base/video_util.h" | 36 #include "media/base/video_util.h" |
37 #include "media/base/yuv_convert.h" | 37 #include "media/base/yuv_convert.h" |
38 #include "media/capture/video/video_capture_buffer_pool_impl.h" | 38 #include "media/capture/video/video_capture_buffer_pool_impl.h" |
39 #include "media/capture/video/video_capture_buffer_tracker_factory_impl.h" | 39 #include "media/capture/video/video_capture_buffer_tracker_factory_impl.h" |
| 40 #include "media/capture/video/video_capture_device_client.h" |
40 #include "media/capture/video_capture_types.h" | 41 #include "media/capture/video_capture_types.h" |
41 #include "skia/ext/platform_canvas.h" | 42 #include "skia/ext/platform_canvas.h" |
42 #include "testing/gmock/include/gmock/gmock.h" | 43 #include "testing/gmock/include/gmock/gmock.h" |
43 #include "testing/gtest/include/gtest/gtest.h" | 44 #include "testing/gtest/include/gtest/gtest.h" |
44 #include "third_party/skia/include/core/SkColor.h" | 45 #include "third_party/skia/include/core/SkColor.h" |
45 #include "ui/base/layout.h" | 46 #include "ui/base/layout.h" |
46 #include "ui/display/display.h" | 47 #include "ui/display/display.h" |
47 #include "ui/display/screen.h" | 48 #include "ui/display/screen.h" |
48 #include "ui/display/test/test_screen.h" | 49 #include "ui/display/test/test_screen.h" |
49 #include "ui/gfx/geometry/dip_util.h" | 50 #include "ui/gfx/geometry/dip_util.h" |
(...skipping 292 matching lines...) |
342 void(const uint8_t* data, | 343 void(const uint8_t* data, |
343 int length, | 344 int length, |
344 const media::VideoCaptureFormat& frame_format, | 345 const media::VideoCaptureFormat& frame_format, |
345 int rotation, | 346 int rotation, |
346 base::TimeTicks reference_time, | 347 base::TimeTicks reference_time, |
347 base::TimeDelta timestamp, | 348 base::TimeDelta timestamp, |
348 int frame_feedback_id)); | 349 int frame_feedback_id)); |
349 | 350 |
350 MOCK_METHOD0(DoOnIncomingCapturedBuffer, void(void)); | 351 MOCK_METHOD0(DoOnIncomingCapturedBuffer, void(void)); |
351 | 352 |
352 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 353 media::VideoCaptureDevice::Client::Buffer ReserveOutputBuffer( |
353 ReserveOutputBuffer(const gfx::Size& dimensions, | 354 const gfx::Size& dimensions, |
354 media::VideoPixelFormat format, | 355 media::VideoPixelFormat format, |
355 media::VideoPixelStorage storage, | 356 media::VideoPixelStorage storage, |
356 int frame_feedback_id) override { | 357 int frame_feedback_id) override { |
357 CHECK_EQ(format, media::PIXEL_FORMAT_I420); | 358 CHECK_EQ(format, media::PIXEL_FORMAT_I420); |
358 int buffer_id_to_drop = | 359 int buffer_id_to_drop = |
359 media::VideoCaptureBufferPool::kInvalidId; // Ignored. | 360 media::VideoCaptureBufferPool::kInvalidId; // Ignored. |
360 const int buffer_id = buffer_pool_->ReserveForProducer( | 361 const int buffer_id = buffer_pool_->ReserveForProducer( |
361 dimensions, format, storage, frame_feedback_id, &buffer_id_to_drop); | 362 dimensions, format, storage, frame_feedback_id, &buffer_id_to_drop); |
362 if (buffer_id == media::VideoCaptureBufferPool::kInvalidId) | 363 if (buffer_id == media::VideoCaptureBufferPool::kInvalidId) |
363 return NULL; | 364 return media::VideoCaptureDevice::Client::Buffer(); |
364 | 365 |
365 return std::unique_ptr<media::VideoCaptureDevice::Client::Buffer>( | 366 return media::VideoCaptureDeviceClient::MakeBufferStruct( |
366 new AutoReleaseBuffer(buffer_pool_, | 367 buffer_pool_, buffer_id, frame_feedback_id); |
367 buffer_pool_->GetBufferHandle(buffer_id), | |
368 buffer_id, frame_feedback_id)); | |
369 } | 368 } |
370 | 369 |
371 // Trampoline method to workaround GMOCK problems with std::unique_ptr<>. | 370 // Trampoline method to workaround GMOCK problems with std::unique_ptr<>. |
372 void OnIncomingCapturedBuffer(std::unique_ptr<Buffer> buffer, | 371 void OnIncomingCapturedBuffer(Buffer buffer, |
373 const media::VideoCaptureFormat& format, | 372 const media::VideoCaptureFormat& format, |
374 base::TimeTicks reference_time, | 373 base::TimeTicks reference_time, |
375 base::TimeDelta timestamp) override { | 374 base::TimeDelta timestamp) override { |
376 DoOnIncomingCapturedBuffer(); | 375 DoOnIncomingCapturedBuffer(); |
377 } | 376 } |
378 | 377 |
379 void OnIncomingCapturedBufferExt( | 378 void OnIncomingCapturedBufferExt( |
380 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> buffer, | 379 media::VideoCaptureDevice::Client::Buffer buffer, |
381 const media::VideoCaptureFormat& format, | 380 const media::VideoCaptureFormat& format, |
382 base::TimeTicks reference_time, | 381 base::TimeTicks reference_time, |
383 base::TimeDelta timestamp, | 382 base::TimeDelta timestamp, |
384 gfx::Rect visible_rect, | 383 gfx::Rect visible_rect, |
385 const media::VideoFrameMetadata& additional_metadata) override { | 384 const media::VideoFrameMetadata& additional_metadata) override { |
386 EXPECT_FALSE(visible_rect.IsEmpty()); | 385 EXPECT_FALSE(visible_rect.IsEmpty()); |
387 EXPECT_EQ(media::PIXEL_FORMAT_I420, format.pixel_format); | 386 EXPECT_EQ(media::PIXEL_FORMAT_I420, format.pixel_format); |
388 EXPECT_EQ(kTestFramesPerSecond, format.frame_rate); | 387 EXPECT_EQ(kTestFramesPerSecond, format.frame_rate); |
389 | 388 |
390 // TODO(miu): We just look at the center pixel presently, because if the | 389 // TODO(miu): We just look at the center pixel presently, because if the |
391 // analysis is too slow, the backlog of frames will grow without bound and | 390 // analysis is too slow, the backlog of frames will grow without bound and |
392 // trouble erupts. http://crbug.com/174519 | 391 // trouble erupts. http://crbug.com/174519 |
393 using media::VideoFrame; | 392 using media::VideoFrame; |
| 393 auto buffer_access = buffer.handle_provider->GetHandleForInProcessAccess(); |
394 auto frame = VideoFrame::WrapExternalSharedMemory( | 394 auto frame = VideoFrame::WrapExternalSharedMemory( |
395 media::PIXEL_FORMAT_I420, format.frame_size, visible_rect, | 395 media::PIXEL_FORMAT_I420, format.frame_size, visible_rect, |
396 format.frame_size, static_cast<uint8_t*>(buffer->data()), | 396 format.frame_size, buffer_access->data(), buffer_access->mapped_size(), |
397 buffer->mapped_size(), base::SharedMemory::NULLHandle(), 0u, | 397 base::SharedMemory::NULLHandle(), 0u, base::TimeDelta()); |
398 base::TimeDelta()); | |
399 const gfx::Point center = visible_rect.CenterPoint(); | 398 const gfx::Point center = visible_rect.CenterPoint(); |
400 const int center_offset_y = | 399 const int center_offset_y = |
401 (frame->stride(VideoFrame::kYPlane) * center.y()) + center.x(); | 400 (frame->stride(VideoFrame::kYPlane) * center.y()) + center.x(); |
402 const int center_offset_uv = | 401 const int center_offset_uv = |
403 (frame->stride(VideoFrame::kUPlane) * (center.y() / 2)) + | 402 (frame->stride(VideoFrame::kUPlane) * (center.y() / 2)) + |
404 (center.x() / 2); | 403 (center.x() / 2); |
405 report_callback_.Run( | 404 report_callback_.Run( |
406 SkColorSetRGB(frame->data(VideoFrame::kYPlane)[center_offset_y], | 405 SkColorSetRGB(frame->data(VideoFrame::kYPlane)[center_offset_y], |
407 frame->data(VideoFrame::kUPlane)[center_offset_uv], | 406 frame->data(VideoFrame::kUPlane)[center_offset_uv], |
408 frame->data(VideoFrame::kVPlane)[center_offset_uv]), | 407 frame->data(VideoFrame::kVPlane)[center_offset_uv]), |
409 frame->visible_rect().size()); | 408 frame->visible_rect().size()); |
410 } | 409 } |
411 | 410 |
412 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 411 media::VideoCaptureDevice::Client::Buffer ResurrectLastOutputBuffer( |
413 ResurrectLastOutputBuffer(const gfx::Size& dimensions, | 412 const gfx::Size& dimensions, |
414 media::VideoPixelFormat format, | 413 media::VideoPixelFormat format, |
415 media::VideoPixelStorage storage, | 414 media::VideoPixelStorage storage, |
416 int frame_feedback_id) override { | 415 int frame_feedback_id) override { |
417 CHECK_EQ(format, media::PIXEL_FORMAT_I420); | 416 CHECK_EQ(format, media::PIXEL_FORMAT_I420); |
418 const int buffer_id = | 417 const int buffer_id = |
419 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); | 418 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); |
420 if (buffer_id == media::VideoCaptureBufferPool::kInvalidId) | 419 if (buffer_id == media::VideoCaptureBufferPool::kInvalidId) |
421 return nullptr; | 420 return media::VideoCaptureDevice::Client::Buffer(); |
422 return std::unique_ptr<media::VideoCaptureDevice::Client::Buffer>( | 421 return media::VideoCaptureDeviceClient::MakeBufferStruct( |
423 new AutoReleaseBuffer(buffer_pool_, | 422 buffer_pool_, buffer_id, frame_feedback_id); |
424 buffer_pool_->GetBufferHandle(buffer_id), | |
425 buffer_id, frame_feedback_id)); | |
426 } | 423 } |
427 | 424 |
428 void OnError(const tracked_objects::Location& from_here, | 425 void OnError(const tracked_objects::Location& from_here, |
429 const std::string& reason) override { | 426 const std::string& reason) override { |
430 error_callback_.Run(); | 427 error_callback_.Run(); |
431 } | 428 } |
432 | 429 |
433 double GetBufferPoolUtilization() const override { return 0.0; } | 430 double GetBufferPoolUtilization() const override { return 0.0; } |
434 | 431 |
435 private: | 432 private: |
436 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { | |
437 public: | |
438 AutoReleaseBuffer( | |
439 const scoped_refptr<media::VideoCaptureBufferPool>& pool, | |
440 std::unique_ptr<media::VideoCaptureBufferHandle> buffer_handle, | |
441 int buffer_id, | |
442 int frame_feedback_id) | |
443 : id_(buffer_id), | |
444 frame_feedback_id_(frame_feedback_id), | |
445 pool_(pool), | |
446 buffer_handle_(std::move(buffer_handle)) { | |
447 DCHECK(pool_); | |
448 } | |
449 int id() const override { return id_; } | |
450 int frame_feedback_id() const override { return frame_feedback_id_; } | |
451 gfx::Size dimensions() const override { | |
452 return buffer_handle_->dimensions(); | |
453 } | |
454 size_t mapped_size() const override { | |
455 return buffer_handle_->mapped_size(); | |
456 } | |
457 void* data(int plane) override { return buffer_handle_->data(plane); } | |
458 #if defined(OS_POSIX) && !defined(OS_MACOSX) | |
459 base::FileDescriptor AsPlatformFile() override { | |
460 return base::FileDescriptor(); | |
461 } | |
462 #endif | |
463 bool IsBackedByVideoFrame() const override { | |
464 return buffer_handle_->IsBackedByVideoFrame(); | |
465 } | |
466 scoped_refptr<media::VideoFrame> GetVideoFrame() override { | |
467 return buffer_handle_->GetVideoFrame(); | |
468 } | |
469 | |
470 private: | |
471 ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); } | |
472 | |
473 const int id_; | |
474 const int frame_feedback_id_; | |
475 const scoped_refptr<media::VideoCaptureBufferPool> pool_; | |
476 const std::unique_ptr<media::VideoCaptureBufferHandle> buffer_handle_; | |
477 }; | |
478 | |
479 scoped_refptr<media::VideoCaptureBufferPool> buffer_pool_; | 433 scoped_refptr<media::VideoCaptureBufferPool> buffer_pool_; |
480 base::Callback<void(SkColor, const gfx::Size&)> report_callback_; | 434 base::Callback<void(SkColor, const gfx::Size&)> report_callback_; |
481 base::Closure error_callback_; | 435 base::Closure error_callback_; |
482 | 436 |
483 DISALLOW_COPY_AND_ASSIGN(StubClient); | 437 DISALLOW_COPY_AND_ASSIGN(StubClient); |
484 }; | 438 }; |
485 | 439 |
486 class StubClientObserver { | 440 class StubClientObserver { |
487 public: | 441 public: |
488 StubClientObserver() | 442 StubClientObserver() |
(...skipping 788 matching lines...) |
1277 for (int i = 0; i < 3; ++i) { | 1231 for (int i = 0; i < 3; ++i) { |
1278 SimulateRefreshFrameRequest(); | 1232 SimulateRefreshFrameRequest(); |
1279 ASSERT_NO_FATAL_FAILURE(client_observer()->WaitForNextColor(SK_ColorGREEN)); | 1233 ASSERT_NO_FATAL_FAILURE(client_observer()->WaitForNextColor(SK_ColorGREEN)); |
1280 } | 1234 } |
1281 | 1235 |
1282 device()->StopAndDeAllocate(); | 1236 device()->StopAndDeAllocate(); |
1283 } | 1237 } |
1284 | 1238 |
1285 } // namespace | 1239 } // namespace |
1286 } // namespace content | 1240 } // namespace content |
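
A minimal, self-contained sketch of the trampoline pattern StubClient uses in the diff above (the names MoveOnlyBuffer, Consumer, MockConsumer, and DoOnBuffer are hypothetical, not from the CL): GMock cannot directly mock a method whose parameter is move-only, such as the old std::unique_ptr<Buffer> or the new Client::Buffer struct, so the real override consumes the argument and forwards to a parameterless MOCK_METHOD0 that expectations attach to. This is a sketch against plain gtest/gmock include paths, assuming the test binary links gtest_main.

#include <memory>

#include "gmock/gmock.h"
#include "gtest/gtest.h"

// Stand-in for a move-only argument like media::VideoCaptureDevice::Client::Buffer.
struct MoveOnlyBuffer {
  std::unique_ptr<int> data;
};

class Consumer {
 public:
  virtual ~Consumer() = default;
  virtual void OnBuffer(MoveOnlyBuffer buffer) = 0;
};

class MockConsumer : public Consumer {
 public:
  // Trampoline target: no move-only parameters, so GMock can mock it directly.
  MOCK_METHOD0(DoOnBuffer, void(void));

  void OnBuffer(MoveOnlyBuffer buffer) override {
    // |buffer| is consumed and dropped here; the mock only records that the
    // call happened, mirroring StubClient::OnIncomingCapturedBuffer().
    DoOnBuffer();
  }
};

TEST(TrampolineSketch, RecordsCall) {
  MockConsumer consumer;
  EXPECT_CALL(consumer, DoOnBuffer()).Times(1);
  consumer.OnBuffer(MoveOnlyBuffer{std::make_unique<int>(42)});
}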