Index: content/renderer/media/webrtc_audio_device_unittest.cc
diff --git a/content/renderer/media/webrtc_audio_device_unittest.cc b/content/renderer/media/webrtc_audio_device_unittest.cc
index 2f1a21098390e2b2b22fc2447fd5e6999b1d33f6..f18ad13fe139d1f27b963fa1bdcfdb6e35bd0c23 100644
--- a/content/renderer/media/webrtc_audio_device_unittest.cc
+++ b/content/renderer/media/webrtc_audio_device_unittest.cc
@@ -24,6 +24,8 @@ using testing::StrEq;
 
 namespace {
 
+static const int kRenderViewId = -2;
+
 ACTION_P(QuitMessageLoop, loop_or_proxy) {
   loop_or_proxy->PostTask(FROM_HERE, MessageLoop::QuitClosure());
 }
@@ -265,13 +267,15 @@ TEST_F(WebRTCAudioDeviceTest, StartPlayout) {
     return;
 
   EXPECT_CALL(media_observer(),
-              OnSetAudioStreamStatus(_, 1, StrEq("created"))).Times(1);
+              OnSetAudioStreamStatus(_, kRenderViewId,
+                                     1, StrEq("created"))).Times(1);
   EXPECT_CALL(media_observer(),
-              OnSetAudioStreamPlaying(_, 1, true)).Times(1);
+              OnSetAudioStreamPlaying(_, kRenderViewId, 1, true)).Times(1);
   EXPECT_CALL(media_observer(),
-              OnSetAudioStreamStatus(_, 1, StrEq("closed"))).Times(1);
+              OnSetAudioStreamStatus(_, kRenderViewId,
+                                     1, StrEq("closed"))).Times(1);
   EXPECT_CALL(media_observer(),
-              OnDeleteAudioStream(_, 1)).Times(AnyNumber());
+              OnDeleteAudioStream(_, kRenderViewId, 1)).Times(AnyNumber());
 
   scoped_refptr<WebRtcAudioDeviceImpl> webrtc_audio_device(
       new WebRtcAudioDeviceImpl(0));
@@ -409,13 +413,16 @@ TEST_F(WebRTCAudioDeviceTest, PlayLocalFile) {
     return;
 
   EXPECT_CALL(media_observer(),
-              OnSetAudioStreamStatus(_, 1, StrEq("created"))).Times(1);
+              OnSetAudioStreamStatus(_, kRenderViewId,
+                                     1, StrEq("created"))).Times(1);
   EXPECT_CALL(media_observer(),
-              OnSetAudioStreamPlaying(_, 1, true)).Times(1);
+              OnSetAudioStreamPlaying(_, kRenderViewId,
+                                      1, true)).Times(1);
   EXPECT_CALL(media_observer(),
-              OnSetAudioStreamStatus(_, 1, StrEq("closed"))).Times(1);
+              OnSetAudioStreamStatus(_, kRenderViewId,
+                                     1, StrEq("closed"))).Times(1);
   EXPECT_CALL(media_observer(),
-              OnDeleteAudioStream(_, 1)).Times(AnyNumber());
+              OnDeleteAudioStream(_, kRenderViewId, 1)).Times(AnyNumber());
 
   scoped_refptr<WebRtcAudioDeviceImpl> webrtc_audio_device(
       new WebRtcAudioDeviceImpl(0));
@@ -477,13 +484,13 @@ TEST_F(WebRTCAudioDeviceTest, FullDuplexAudioWithAGC) {
     return;
 
   EXPECT_CALL(media_observer(),
-              OnSetAudioStreamStatus(_, 1, StrEq("created")));
+              OnSetAudioStreamStatus(_, kRenderViewId, 1, StrEq("created")));
   EXPECT_CALL(media_observer(),
-              OnSetAudioStreamPlaying(_, 1, true));
+              OnSetAudioStreamPlaying(_, kRenderViewId, 1, true));
   EXPECT_CALL(media_observer(),
-              OnSetAudioStreamStatus(_, 1, StrEq("closed")));
+              OnSetAudioStreamStatus(_, kRenderViewId, 1, StrEq("closed")));
   EXPECT_CALL(media_observer(),
-              OnDeleteAudioStream(_, 1)).Times(AnyNumber());
+              OnDeleteAudioStream(_, kRenderViewId, 1)).Times(AnyNumber());
 
   scoped_refptr<WebRtcAudioDeviceImpl> webrtc_audio_device(
       new WebRtcAudioDeviceImpl(0));