OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #define _USE_MATH_DEFINES // For VC++ to get M_PI. This has to be first. | 5 #define _USE_MATH_DEFINES // For VC++ to get M_PI. This has to be first. |
6 | 6 |
7 #include <utility> | 7 #include <utility> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/macros.h" | 10 #include "base/macros.h" |
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
69 | 69 |
70 class TestScreenCapturer : public webrtc::DesktopCapturer { | 70 class TestScreenCapturer : public webrtc::DesktopCapturer { |
71 public: | 71 public: |
72 TestScreenCapturer() {} | 72 TestScreenCapturer() {} |
73 ~TestScreenCapturer() override {} | 73 ~TestScreenCapturer() override {} |
74 | 74 |
75 // webrtc::DesktopCapturer interface. | 75 // webrtc::DesktopCapturer interface. |
76 void Start(Callback* callback) override { | 76 void Start(Callback* callback) override { |
77 callback_ = callback; | 77 callback_ = callback; |
78 } | 78 } |
| 79 |
79 void CaptureFrame() override { | 80 void CaptureFrame() override { |
| 81 if (capture_request_index_to_fail_ >= 0) { |
| 82 capture_request_index_to_fail_--; |
| 83 if (capture_request_index_to_fail_ < 0) { |
| 84 callback_->OnCaptureResult( |
| 85 webrtc::DesktopCapturer::Result::ERROR_TEMPORARY, nullptr); |
| 86 return; |
| 87 } |
| 88 } |
| 89 |
80 // Return black 100x100 frame. | 90 // Return black 100x100 frame. |
81 std::unique_ptr<webrtc::DesktopFrame> frame( | 91 std::unique_ptr<webrtc::DesktopFrame> frame( |
82 new webrtc::BasicDesktopFrame(webrtc::DesktopSize(100, 100))); | 92 new webrtc::BasicDesktopFrame(webrtc::DesktopSize(100, 100))); |
83 memset(frame->data(), 0, frame->stride() * frame->size().height()); | 93 memset(frame->data(), frame_index_, |
84 | 94 frame->stride() * frame->size().height()); |
85 // Set updated_region only for the first frame, as the frame content | 95 frame_index_++; |
86 // doesn't change. | 96 frame->mutable_updated_region()->SetRect( |
87 if (!first_frame_sent_) { | 97 webrtc::DesktopRect::MakeSize(frame->size())); |
88 first_frame_sent_ = true; | |
89 frame->mutable_updated_region()->SetRect( | |
90 webrtc::DesktopRect::MakeSize(frame->size())); | |
91 } | |
92 | 98 |
93 callback_->OnCaptureResult(webrtc::DesktopCapturer::Result::SUCCESS, | 99 callback_->OnCaptureResult(webrtc::DesktopCapturer::Result::SUCCESS, |
94 std::move(frame)); | 100 std::move(frame)); |
95 } | 101 } |
96 | 102 |
97 bool GetSourceList(SourceList* sources) override { | 103 bool GetSourceList(SourceList* sources) override { |
98 return true; | 104 return true; |
99 } | 105 } |
100 | 106 |
101 bool SelectSource(SourceId id) override { | 107 bool SelectSource(SourceId id) override { |
102 return true; | 108 return true; |
103 } | 109 } |
104 | 110 |
| 111 void FailNthFrame(int n) { capture_request_index_to_fail_ = n; } |
| 112 |
105 private: | 113 private: |
106 Callback* callback_ = nullptr; | 114 Callback* callback_ = nullptr; |
107 bool first_frame_sent_ = false; | 115 int frame_index_ = 0; |
| 116 |
| 117 int capture_request_index_to_fail_ = -1; |
108 }; | 118 }; |
109 | 119 |
110 static const int kAudioSampleRate = AudioPacket::SAMPLING_RATE_48000; | 120 static const int kAudioSampleRate = AudioPacket::SAMPLING_RATE_48000; |
111 static const int kAudioPacketDurationMs = 50; | 121 static const int kAudioPacketDurationMs = 50; |
112 static constexpr int kSamplesPerAudioPacket = | 122 static constexpr int kSamplesPerAudioPacket = |
113 kAudioSampleRate * kAudioPacketDurationMs / | 123 kAudioSampleRate * kAudioPacketDurationMs / |
114 base::Time::kMillisecondsPerSecond; | 124 base::Time::kMillisecondsPerSecond; |
115 static constexpr base::TimeDelta kAudioPacketDuration = | 125 static constexpr base::TimeDelta kAudioPacketDuration = |
116 base::TimeDelta::FromMilliseconds(kAudioPacketDurationMs); | 126 base::TimeDelta::FromMilliseconds(kAudioPacketDurationMs); |
117 | 127 |
(...skipping 221 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
339 if (client_connected_ && run_loop_) | 349 if (client_connected_ && run_loop_) |
340 run_loop_->Quit(); | 350 run_loop_->Quit(); |
341 } | 351 } |
342 | 352 |
343 void OnClientConnected() { | 353 void OnClientConnected() { |
344 client_connected_ = true; | 354 client_connected_ = true; |
345 if (host_connected_ && run_loop_) | 355 if (host_connected_ && run_loop_) |
346 run_loop_->Quit(); | 356 run_loop_->Quit(); |
347 } | 357 } |
348 | 358 |
349 void WaitFirstVideoFrame() { | 359 void WaitNextVideoFrame() { |
| 360 size_t received_frames = |
| 361 is_using_webrtc() |
| 362 ? client_video_renderer_.GetFrameConsumer() |
| 363 ->received_frames() |
| 364 .size() |
| 365 : client_video_renderer_.GetVideoStub()->received_packets().size(); |
| 366 |
350 base::RunLoop run_loop; | 367 base::RunLoop run_loop; |
351 | 368 |
352 // Expect frames to be passed to FrameConsumer when WebRTC is used, or to | 369 // Expect frames to be passed to FrameConsumer when WebRTC is used, or to |
353 // VideoStub otherwise. | 370 // VideoStub otherwise. |
354 if (is_using_webrtc()) { | 371 if (is_using_webrtc()) { |
355 client_video_renderer_.GetFrameConsumer()->set_on_frame_callback( | 372 client_video_renderer_.GetFrameConsumer()->set_on_frame_callback( |
356 base::Bind(&base::RunLoop::Quit, base::Unretained(&run_loop))); | 373 base::Bind(&base::RunLoop::Quit, base::Unretained(&run_loop))); |
357 } else { | 374 } else { |
358 client_video_renderer_.GetVideoStub()->set_on_frame_callback( | 375 client_video_renderer_.GetVideoStub()->set_on_frame_callback( |
359 base::Bind(&base::RunLoop::Quit, base::Unretained(&run_loop))); | 376 base::Bind(&base::RunLoop::Quit, base::Unretained(&run_loop))); |
360 } | 377 } |
361 | 378 |
362 run_loop.Run(); | 379 run_loop.Run(); |
363 | 380 |
364 if (is_using_webrtc()) { | 381 if (is_using_webrtc()) { |
365 EXPECT_EQ( | 382 EXPECT_EQ( |
366 client_video_renderer_.GetFrameConsumer()->received_frames().size(), | 383 client_video_renderer_.GetFrameConsumer()->received_frames().size(), |
367 1U); | 384 received_frames + 1); |
368 EXPECT_EQ( | 385 EXPECT_EQ( |
369 client_video_renderer_.GetVideoStub()->received_packets().size(), 0U); | 386 client_video_renderer_.GetVideoStub()->received_packets().size(), 0U); |
370 client_video_renderer_.GetFrameConsumer()->set_on_frame_callback( | 387 client_video_renderer_.GetFrameConsumer()->set_on_frame_callback( |
371 base::Closure()); | 388 base::Closure()); |
372 } else { | 389 } else { |
373 EXPECT_EQ( | 390 EXPECT_EQ( |
374 client_video_renderer_.GetFrameConsumer()->received_frames().size(), | 391 client_video_renderer_.GetFrameConsumer()->received_frames().size(), |
375 0U); | 392 0U); |
376 EXPECT_EQ( | 393 EXPECT_EQ( |
377 client_video_renderer_.GetVideoStub()->received_packets().size(), 1U); | 394 client_video_renderer_.GetVideoStub()->received_packets().size(), |
| 395 received_frames + 1); |
378 client_video_renderer_.GetVideoStub()->set_on_frame_callback( | 396 client_video_renderer_.GetVideoStub()->set_on_frame_callback( |
379 base::Closure()); | 397 base::Closure()); |
380 } | 398 } |
381 } | 399 } |
382 | 400 |
383 void WaitFirstFrameStats() { | 401 void WaitFirstFrameStats() { |
384 if (!client_video_renderer_.GetFrameStatsConsumer() | 402 if (!client_video_renderer_.GetFrameStatsConsumer() |
385 ->received_stats() | 403 ->received_stats() |
386 .empty()) { | 404 .empty()) { |
387 return; | 405 return; |
(...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
487 run_loop.Run(); | 505 run_loop.Run(); |
488 } | 506 } |
489 | 507 |
490 TEST_P(ConnectionTest, Video) { | 508 TEST_P(ConnectionTest, Video) { |
491 Connect(); | 509 Connect(); |
492 | 510 |
493 std::unique_ptr<VideoStream> video_stream = | 511 std::unique_ptr<VideoStream> video_stream = |
494 host_connection_->StartVideoStream( | 512 host_connection_->StartVideoStream( |
495 base::MakeUnique<TestScreenCapturer>()); | 513 base::MakeUnique<TestScreenCapturer>()); |
496 | 514 |
497 WaitFirstVideoFrame(); | 515 WaitNextVideoFrame(); |
498 } | 516 } |
499 | 517 |
500 // Verifies that the VideoStream doesn't lose any video frames while the | 518 // Verifies that the VideoStream doesn't lose any video frames while the |
501 // connection is being established. | 519 // connection is being established. |
502 TEST_P(ConnectionTest, VideoWithSlowSignaling) { | 520 TEST_P(ConnectionTest, VideoWithSlowSignaling) { |
503 // Add signaling delay to slow down connection handshake. | 521 // Add signaling delay to slow down connection handshake. |
504 host_session_->set_signaling_delay(base::TimeDelta::FromMilliseconds(100)); | 522 host_session_->set_signaling_delay(base::TimeDelta::FromMilliseconds(100)); |
505 client_session_->set_signaling_delay(base::TimeDelta::FromMilliseconds(100)); | 523 client_session_->set_signaling_delay(base::TimeDelta::FromMilliseconds(100)); |
506 | 524 |
507 Connect(); | 525 Connect(); |
508 | 526 |
509 std::unique_ptr<VideoStream> video_stream = | 527 std::unique_ptr<VideoStream> video_stream = |
510 host_connection_->StartVideoStream( | 528 host_connection_->StartVideoStream( |
511 base::WrapUnique(new TestScreenCapturer())); | 529 base::WrapUnique(new TestScreenCapturer())); |
512 | 530 |
513 WaitFirstVideoFrame(); | 531 WaitNextVideoFrame(); |
514 } | 532 } |
515 | 533 |
516 TEST_P(ConnectionTest, DestroyOnIncomingMessage) { | 534 TEST_P(ConnectionTest, DestroyOnIncomingMessage) { |
517 Connect(); | 535 Connect(); |
518 | 536 |
519 KeyEvent event; | 537 KeyEvent event; |
520 event.set_usb_keycode(3); | 538 event.set_usb_keycode(3); |
521 event.set_pressed(true); | 539 event.set_pressed(true); |
522 | 540 |
523 base::RunLoop run_loop; | 541 base::RunLoop run_loop; |
(...skipping 23 matching lines...) Expand all Loading... |
547 scoped_refptr<InputEventTimestampsSourceImpl> input_event_timestamps_source = | 565 scoped_refptr<InputEventTimestampsSourceImpl> input_event_timestamps_source = |
548 new InputEventTimestampsSourceImpl(); | 566 new InputEventTimestampsSourceImpl(); |
549 input_event_timestamps_source->OnEventReceived( | 567 input_event_timestamps_source->OnEventReceived( |
550 InputEventTimestamps{event_timestamp, start_time}); | 568 InputEventTimestamps{event_timestamp, start_time}); |
551 | 569 |
552 std::unique_ptr<VideoStream> video_stream = | 570 std::unique_ptr<VideoStream> video_stream = |
553 host_connection_->StartVideoStream( | 571 host_connection_->StartVideoStream( |
554 base::MakeUnique<TestScreenCapturer>()); | 572 base::MakeUnique<TestScreenCapturer>()); |
555 video_stream->SetEventTimestampsSource(input_event_timestamps_source); | 573 video_stream->SetEventTimestampsSource(input_event_timestamps_source); |
556 | 574 |
557 WaitFirstVideoFrame(); | 575 WaitNextVideoFrame(); |
558 | 576 |
559 base::TimeTicks finish_time = base::TimeTicks::Now(); | 577 base::TimeTicks finish_time = base::TimeTicks::Now(); |
560 | 578 |
561 WaitFirstFrameStats(); | 579 WaitFirstFrameStats(); |
562 | 580 |
563 const FrameStats& stats = | 581 const FrameStats& stats = |
564 client_video_renderer_.GetFrameStatsConsumer()->received_stats().front(); | 582 client_video_renderer_.GetFrameStatsConsumer()->received_stats().front(); |
565 | 583 |
566 EXPECT_GT(stats.host_stats.frame_size, 0); | 584 EXPECT_GT(stats.host_stats.frame_size, 0); |
567 | 585 |
(...skipping 22 matching lines...) Expand all Loading... |
590 Connect(); | 608 Connect(); |
591 | 609 |
592 std::unique_ptr<AudioStream> audio_stream = | 610 std::unique_ptr<AudioStream> audio_stream = |
593 host_connection_->StartAudioStream(base::MakeUnique<TestAudioSource>()); | 611 host_connection_->StartAudioStream(base::MakeUnique<TestAudioSource>()); |
594 | 612 |
595 // Wait for 1 second worth of audio samples. | 613 // Wait for 1 second worth of audio samples. |
596 client_audio_player_.WaitForSamples(kAudioSampleRate * 2); | 614 client_audio_player_.WaitForSamples(kAudioSampleRate * 2); |
597 client_audio_player_.Verify(); | 615 client_audio_player_.Verify(); |
598 } | 616 } |
599 | 617 |
| 618 TEST_P(ConnectionTest, FirstCaptureFailed) { |
| 619 Connect(); |
| 620 |
| 621 base::TimeTicks event_timestamp = base::TimeTicks::FromInternalValue(42); |
| 622 |
| 623 scoped_refptr<InputEventTimestampsSourceImpl> input_event_timestamps_source = |
| 624 new InputEventTimestampsSourceImpl(); |
| 625 input_event_timestamps_source->OnEventReceived( |
| 626 InputEventTimestamps{event_timestamp, base::TimeTicks::Now()}); |
| 627 |
| 628 auto capturer = base::MakeUnique<TestScreenCapturer>(); |
| 629 capturer->FailNthFrame(0); |
| 630 auto video_stream = host_connection_->StartVideoStream(std::move(capturer)); |
| 631 video_stream->SetEventTimestampsSource(input_event_timestamps_source); |
| 632 |
| 633 WaitNextVideoFrame(); |
| 634 |
| 635 // Currently stats work in this test only for WebRTC because for ICE |
| 636 // connections stats are reported by SoftwareVideoRenderer which is not used |
| 637 // in this test. |
| 638 // TODO(sergeyu): Fix this. |
| 639 if (is_using_webrtc()) { |
| 640 WaitFirstFrameStats(); |
| 641 |
| 642 // Verify that the event timestamp received before the first frame gets used |
| 643 // for the second frame. |
| 644 const FrameStats& stats = client_video_renderer_.GetFrameStatsConsumer() |
| 645 ->received_stats() |
| 646 .front(); |
| 647 EXPECT_EQ(event_timestamp, stats.host_stats.latest_event_timestamp); |
| 648 } |
| 649 } |
| 650 |
| 651 TEST_P(ConnectionTest, SecondCaptureFailed) { |
| 652 Connect(); |
| 653 |
| 654 auto capturer = base::MakeUnique<TestScreenCapturer>(); |
| 655 capturer->FailNthFrame(1); |
| 656 auto video_stream = host_connection_->StartVideoStream(std::move(capturer)); |
| 657 |
| 658 WaitNextVideoFrame(); |
| 659 WaitNextVideoFrame(); |
| 660 } |
| 661 |
600 } // namespace protocol | 662 } // namespace protocol |
601 } // namespace remoting | 663 } // namespace remoting |
OLD | NEW |