OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "base/environment.h" | 5 #include "base/environment.h" |
6 #include "base/test/test_timeouts.h" | 6 #include "base/test/test_timeouts.h" |
7 #include "content/renderer/media/webrtc_audio_capturer.h" | 7 #include "content/renderer/media/webrtc_audio_capturer.h" |
8 #include "content/renderer/media/webrtc_audio_device_impl.h" | 8 #include "content/renderer/media/webrtc_audio_device_impl.h" |
9 #include "content/renderer/media/webrtc_audio_renderer.h" | 9 #include "content/renderer/media/webrtc_audio_renderer.h" |
10 #include "content/renderer/render_thread_impl.h" | 10 #include "content/renderer/render_thread_impl.h" |
(...skipping 304 matching lines...)
315 | 315 |
316 // Verify that a call to webrtc::VoEBase::StartRecording() starts audio input | 316 // Verify that a call to webrtc::VoEBase::StartRecording() starts audio input |
317 // with the correct set of parameters. A WebRtcAudioDeviceImpl instance will | 317 // with the correct set of parameters. A WebRtcAudioDeviceImpl instance will |
318 // be utilized to implement the actual audio path. The test registers a | 318 // be utilized to implement the actual audio path. The test registers a |
319 // webrtc::VoEExternalMedia implementation to hijack the input audio and | 319 // webrtc::VoEExternalMedia implementation to hijack the input audio and |
320 // verify that streaming starts correctly. An external transport implementation | 320 // verify that streaming starts correctly. An external transport implementation |
321 // is also required to ensure that "sending" can start without actually trying | 321 // is also required to ensure that "sending" can start without actually trying |
322 // to send encoded packets to the network. Our main interest here is to ensure | 322 // to send encoded packets to the network. Our main interest here is to ensure |
323 // that the audio capturing starts as it should. | 323 // that the audio capturing starts as it should. |
324 // Disabled when running headless since the bots don't have the required config. | 324 // Disabled when running headless since the bots don't have the required config. |
325 TEST_F(WebRTCAudioDeviceTest, StartRecording) { | 325 |
326 // TODO(leozwang): Because ExternalMediaProcessing is disabled in webrtc, | |
327 // disable this unit test on Android for now. | |
328 #if defined(OS_ANDROID) | |
329 #define MAYBE_StartRecording DISABLED_StartRecording | |
330 #else | |
331 #define MAYBE_StartRecording StartRecording | |
332 #endif | |
333 TEST_F(WebRTCAudioDeviceTest, MAYBE_StartRecording) { | |
326 if (!has_input_devices_ || !has_output_devices_) { | 334 if (!has_input_devices_ || !has_output_devices_) { |
327 LOG(WARNING) << "Missing audio devices."; | 335 LOG(WARNING) << "Missing audio devices."; |
328 return; | 336 return; |
329 } | 337 } |
330 | 338 |
331 scoped_ptr<media::AudioHardwareConfig> config = CreateRealHardwareConfig(); | 339 scoped_ptr<media::AudioHardwareConfig> config = CreateRealHardwareConfig(); |
332 SetAudioHardwareConfig(config.get()); | 340 SetAudioHardwareConfig(config.get()); |
333 | 341 |
334 if (!HardwareSampleRatesAreValid()) | 342 if (!HardwareSampleRatesAreValid()) |
335 return; | 343 return; |
(...skipping 123 matching lines...)
459 } | 467 } |
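
The new hunk above (new lines 326-333) uses the standard gtest idiom for disabling a test on one platform: a MAYBE_ alias that expands to the DISABLED_-prefixed name on Android and to the plain name elsewhere, since gtest skips any test whose name begins with DISABLED_ while still compiling it. A minimal stand-alone sketch of that idiom follows; the suite and test names (MyAudioTestSuite, MAYBE_MyHardwareTest) are hypothetical and not part of this patch.

#include "build/build_config.h"  // Defines OS_ANDROID in 2013-era Chromium builds.
#include "testing/gtest/include/gtest/gtest.h"

#if defined(OS_ANDROID)
#define MAYBE_MyHardwareTest DISABLED_MyHardwareTest
#else
#define MAYBE_MyHardwareTest MyHardwareTest
#endif

TEST(MyAudioTestSuite, MAYBE_MyHardwareTest) {
  // On Android this becomes DISABLED_MyHardwareTest: it is still compiled (so
  // it cannot bit-rot) but is not executed unless
  // --gtest_also_run_disabled_tests is passed. On other platforms it runs
  // normally.
  EXPECT_TRUE(true);
}

Expanding to DISABLED_ rather than wrapping the whole test body in #if keeps the test visible to the compiler on every platform, which is why the patch prefers this pattern over deleting or #ifdef-ing out the tests.
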
460 | 468 |
461 // Uses WebRtcAudioDeviceImpl to play out recorded audio in loopback. | 469 // Uses WebRtcAudioDeviceImpl to play out recorded audio in loopback. |
462 // An external transport implementation is utilized to feed back RTP packets | 470 // An external transport implementation is utilized to feed back RTP packets |
463 // which are recorded, encoded, packetized into RTP packets and finally | 471 // which are recorded, encoded, packetized into RTP packets and finally |
464 // "transmitted". The RTP packets are then fed back into the VoiceEngine | 472 // "transmitted". The RTP packets are then fed back into the VoiceEngine |
465 // where they are decoded and played out on the default audio output device. | 473 // where they are decoded and played out on the default audio output device. |
466 // Disabled when running headless since the bots don't have the required config. | 474 // Disabled when running headless since the bots don't have the required config. |
467 // TODO(henrika): improve quality by using a wideband codec, enabling noise- | 475 // TODO(henrika): improve quality by using a wideband codec, enabling noise- |
468 // suppressions etc. | 476 // suppressions etc. |
469 TEST_F(WebRTCAudioDeviceTest, FullDuplexAudioWithAGC) { | 477 // FullDuplexAudioWithAGC is flanky on Android, disable it for now. |
wjia (left Chromium) 2013/02/12 02:34:24: nit: flaky
478 #if defined(OS_ANDROID) | |
479 #define MAYBE_FullDuplexAudioWithAGC DISABLED_FullDuplexAudioWithAGC | |
480 #else | |
481 #define MAYBE_FullDuplexAudioWithAGC FullDuplexAudioWithAGC | |
482 #endif | |
483 TEST_F(WebRTCAudioDeviceTest, MAYBE_FullDuplexAudioWithAGC) { | |
470 if (!has_output_devices_ || !has_input_devices_) { | 484 if (!has_output_devices_ || !has_input_devices_) { |
471 LOG(WARNING) << "Missing audio devices."; | 485 LOG(WARNING) << "Missing audio devices."; |
472 return; | 486 return; |
473 } | 487 } |
474 | 488 |
475 scoped_ptr<media::AudioHardwareConfig> config = CreateRealHardwareConfig(); | 489 scoped_ptr<media::AudioHardwareConfig> config = CreateRealHardwareConfig(); |
476 SetAudioHardwareConfig(config.get()); | 490 SetAudioHardwareConfig(config.get()); |
477 | 491 |
478 if (!HardwareSampleRatesAreValid()) | 492 if (!HardwareSampleRatesAreValid()) |
479 return; | 493 return; |
(...skipping 18 matching lines...)
498 | 512 |
499 ScopedWebRTCPtr<webrtc::VoEBase> base(engine.get()); | 513 ScopedWebRTCPtr<webrtc::VoEBase> base(engine.get()); |
500 ASSERT_TRUE(base.valid()); | 514 ASSERT_TRUE(base.valid()); |
501 int err = base->Init(webrtc_audio_device); | 515 int err = base->Init(webrtc_audio_device); |
502 ASSERT_EQ(0, err); | 516 ASSERT_EQ(0, err); |
503 | 517 |
504 EXPECT_TRUE(InitializeCapturer(webrtc_audio_device.get())); | 518 EXPECT_TRUE(InitializeCapturer(webrtc_audio_device.get())); |
505 | 519 |
506 ScopedWebRTCPtr<webrtc::VoEAudioProcessing> audio_processing(engine.get()); | 520 ScopedWebRTCPtr<webrtc::VoEAudioProcessing> audio_processing(engine.get()); |
507 ASSERT_TRUE(audio_processing.valid()); | 521 ASSERT_TRUE(audio_processing.valid()); |
522 #if defined(OS_ANDROID) | |
523 // On Android, by default AGC is off. | |
524 bool enabled = true; | |
525 webrtc::AgcModes agc_mode = webrtc::kAgcDefault; | |
526 EXPECT_EQ(0, audio_processing->GetAgcStatus(enabled, agc_mode)); | |
527 EXPECT_FALSE(enabled); | |
528 #else | |
508 bool enabled = false; | 529 bool enabled = false; |
509 webrtc::AgcModes agc_mode = webrtc::kAgcDefault; | 530 webrtc::AgcModes agc_mode = webrtc::kAgcDefault; |
510 EXPECT_EQ(0, audio_processing->GetAgcStatus(enabled, agc_mode)); | 531 EXPECT_EQ(0, audio_processing->GetAgcStatus(enabled, agc_mode)); |
511 EXPECT_TRUE(enabled); | 532 EXPECT_TRUE(enabled); |
512 EXPECT_EQ(agc_mode, webrtc::kAgcAdaptiveAnalog); | 533 EXPECT_EQ(agc_mode, webrtc::kAgcAdaptiveAnalog); |
534 #endif | |
513 | 535 |
514 int ch = base->CreateChannel(); | 536 int ch = base->CreateChannel(); |
515 EXPECT_NE(-1, ch); | 537 EXPECT_NE(-1, ch); |
516 | 538 |
517 ScopedWebRTCPtr<webrtc::VoENetwork> network(engine.get()); | 539 ScopedWebRTCPtr<webrtc::VoENetwork> network(engine.get()); |
518 ASSERT_TRUE(network.valid()); | 540 ASSERT_TRUE(network.valid()); |
519 scoped_ptr<WebRTCTransportImpl> transport( | 541 scoped_ptr<WebRTCTransportImpl> transport( |
520 new WebRTCTransportImpl(network.get())); | 542 new WebRTCTransportImpl(network.get())); |
521 EXPECT_EQ(0, network->RegisterExternalTransport(ch, *transport.get())); | 543 EXPECT_EQ(0, network->RegisterExternalTransport(ch, *transport.get())); |
522 EXPECT_EQ(0, base->StartPlayout(ch)); | 544 EXPECT_EQ(0, base->StartPlayout(ch)); |
523 EXPECT_EQ(0, base->StartSend(ch)); | 545 EXPECT_EQ(0, base->StartSend(ch)); |
524 renderer->Play(); | 546 renderer->Play(); |
525 | 547 |
526 LOG(INFO) << ">> You should now be able to hear yourself in loopback..."; | 548 LOG(INFO) << ">> You should now be able to hear yourself in loopback..."; |
527 message_loop_.PostDelayedTask(FROM_HERE, | 549 message_loop_.PostDelayedTask(FROM_HERE, |
528 MessageLoop::QuitClosure(), | 550 MessageLoop::QuitClosure(), |
529 base::TimeDelta::FromSeconds(2)); | 551 base::TimeDelta::FromSeconds(2)); |
530 message_loop_.Run(); | 552 message_loop_.Run(); |
531 | 553 |
532 renderer->Stop(); | 554 renderer->Stop(); |
533 EXPECT_EQ(0, base->StopSend(ch)); | 555 EXPECT_EQ(0, base->StopSend(ch)); |
534 EXPECT_EQ(0, base->StopPlayout(ch)); | 556 EXPECT_EQ(0, base->StopPlayout(ch)); |
535 | 557 |
536 EXPECT_EQ(0, base->DeleteChannel(ch)); | 558 EXPECT_EQ(0, base->DeleteChannel(ch)); |
537 EXPECT_EQ(0, base->Terminate()); | 559 EXPECT_EQ(0, base->Terminate()); |
538 } | 560 } |
539 | 561 |
540 } // namespace content | 562 } // namespace content |
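
The loopback arrangement described before FullDuplexAudioWithAGC (record, encode, packetize, "transmit", decode, play out) hinges on registering an external transport so no real network I/O happens. As a rough illustration only, the sketch below shows what such a loopback transport could look like against the legacy circa-2013 VoiceEngine interfaces; the class name LoopbackTransport, the include paths, and the error handling are assumptions, not the actual WebRTCTransportImpl under review.

#include "third_party/webrtc/common_types.h"                      // webrtc::Transport (legacy interface).
#include "third_party/webrtc/voice_engine/include/voe_network.h"  // webrtc::VoENetwork (legacy interface).

// Hypothetical sketch: every packet VoiceEngine "sends" is fed straight back
// into the same channel, so encoding and decoding run end to end without
// touching a socket.
class LoopbackTransport : public webrtc::Transport {
 public:
  explicit LoopbackTransport(webrtc::VoENetwork* network) : network_(network) {}
  virtual ~LoopbackTransport() {}

  // Invoked by VoiceEngine with an encoded, packetized RTP packet.
  virtual int SendPacket(int channel, const void* data, int len) {
    if (network_->ReceivedRTPPacket(channel, data, len) != 0)
      return -1;
    return len;  // Pretend all bytes were "sent".
  }

  virtual int SendRTCPPacket(int channel, const void* data, int len) {
    if (network_->ReceivedRTCPPacket(channel, data, len) != 0)
      return -1;
    return len;
  }

 private:
  webrtc::VoENetwork* network_;  // Not owned.
};

Registering such an object via VoENetwork::RegisterExternalTransport(ch, transport), as the test does with WebRTCTransportImpl, keeps the test hermetic: StartSend() can be exercised and the recorded audio still reaches StartPlayout() locally, without sockets or any bot-side network configuration.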