Index: content/renderer/media/media_stream_audio_processor_unittest.cc
diff --git a/content/renderer/media/media_stream_audio_processor_unittest.cc b/content/renderer/media/media_stream_audio_processor_unittest.cc
index 0e40ece9e5c4982c0b745540fe0c90dbf90a641a..83f07f6f7fc5b5da73bc2e20d2d60ede6b75f6ca 100644
--- a/content/renderer/media/media_stream_audio_processor_unittest.cc
+++ b/content/renderer/media/media_stream_audio_processor_unittest.cc
@@ -26,6 +26,16 @@ using ::testing::AnyNumber;
using ::testing::AtLeast;
using ::testing::Return;
+using media::AudioParameters;
+
+namespace webrtc {
+
+bool operator==(const webrtc::Point& lhs, const webrtc::Point& rhs) {
+  return lhs.x() == rhs.x() && lhs.y() == rhs.y() && lhs.z() == rhs.z();
+}
+
+} // namespace webrtc
+
namespace content {
namespace {
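Review note: putting operator== for webrtc::Point inside namespace webrtc means argument-dependent lookup finds it when two std::vector<webrtc::Point> values are compared, so EXPECT_EQ can compare whole geometries directly in the new test further down; this replaces the hand-rolled ExpectPointVectorEqual helper removed at the end of this patch. A minimal, hypothetical illustration of the mechanism (test name and values are made up; the header defining webrtc::Point is assumed to be pulled in by this file's existing includes):

    TEST(PointEqualitySketch, VectorsCompareElementWise) {
      std::vector<webrtc::Point> a(1, webrtc::Point(-0.02f, 0.f, 0.f));
      std::vector<webrtc::Point> b(1, webrtc::Point(-0.02f, 0.f, 0.f));
      // std::vector's operator== compares elements with a[i] == b[i], which
      // resolves to the webrtc::operator== defined above via ADL.
      EXPECT_EQ(a, b);
    }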
@@ -133,12 +143,12 @@ class MediaStreamAudioProcessorTest : public ::testing::Test {
EXPECT_NEAR(input_capture_delay.InMillisecondsF(),
capture_delay.InMillisecondsF(),
output_buffer_duration.InMillisecondsF());
- EXPECT_EQ(audio_processor->OutputFormat().sample_rate(),
- expected_output_sample_rate);
- EXPECT_EQ(audio_processor->OutputFormat().channels(),
- expected_output_channels);
- EXPECT_EQ(audio_processor->OutputFormat().frames_per_buffer(),
- expected_output_buffer_size);
+ EXPECT_EQ(expected_output_sample_rate,
+ audio_processor->OutputFormat().sample_rate());
+ EXPECT_EQ(expected_output_channels,
+ audio_processor->OutputFormat().channels());
+ EXPECT_EQ(expected_output_buffer_size,
+ audio_processor->OutputFormat().frames_per_buffer());
}
data_ptr += params.frames_per_buffer() * params.channels();
@@ -181,6 +191,7 @@ class MediaStreamAudioProcessorTest : public ::testing::Test {
}
media::AudioParameters params_;
+ MediaStreamDevice::AudioDeviceParameters input_device_params_;
};
// Test crashing with ASAN on Android. crbug.com/468762
@@ -195,7 +206,7 @@ TEST_F(MediaStreamAudioProcessorTest, MAYBE_WithAudioProcessing) {
new WebRtcAudioDeviceImpl());
scoped_refptr<MediaStreamAudioProcessor> audio_processor(
new rtc::RefCountedObject<MediaStreamAudioProcessor>(
- constraint_factory.CreateWebMediaConstraints(), 0,
+ constraint_factory.CreateWebMediaConstraints(), input_device_params_,
webrtc_audio_device.get()));
EXPECT_TRUE(audio_processor->has_audio_processing());
audio_processor->OnCaptureFormatChanged(params_);
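Review note: every MediaStreamAudioProcessor construction in this file now passes the new input_device_params_ member where a literal 0 used to go, so the processor receives the full device parameters, including the microphone geometry exercised by the new test below. A usage sketch based only on what this patch shows (the exact constructor declaration lives in media_stream_audio_processor.h and is assumed here, not quoted):

    // Hypothetical call site: populate the device parameters, then hand them
    // to the processor instead of a bare integer.
    MediaStreamDevice::AudioDeviceParameters input_params;
    input_params.mic_positions.push_back(media::Point(0, 0, 0));
    scoped_refptr<MediaStreamAudioProcessor> processor(
        new rtc::RefCountedObject<MediaStreamAudioProcessor>(
            constraint_factory.CreateWebMediaConstraints(), input_params,
            webrtc_audio_device.get()));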
@@ -220,8 +231,8 @@ TEST_F(MediaStreamAudioProcessorTest, VerifyTabCaptureWithoutAudioProcessing) {
tab_string);
scoped_refptr<MediaStreamAudioProcessor> audio_processor(
new rtc::RefCountedObject<MediaStreamAudioProcessor>(
- tab_constraint_factory.CreateWebMediaConstraints(), 0,
- webrtc_audio_device.get()));
+ tab_constraint_factory.CreateWebMediaConstraints(),
+ input_device_params_, webrtc_audio_device.get()));
EXPECT_FALSE(audio_processor->has_audio_processing());
audio_processor->OnCaptureFormatChanged(params_);
@@ -237,8 +248,8 @@ TEST_F(MediaStreamAudioProcessorTest, VerifyTabCaptureWithoutAudioProcessing) {
system_constraint_factory.AddMandatory(kMediaStreamSource,
system_string);
audio_processor = new rtc::RefCountedObject<MediaStreamAudioProcessor>(
- system_constraint_factory.CreateWebMediaConstraints(), 0,
- webrtc_audio_device.get());
+ system_constraint_factory.CreateWebMediaConstraints(),
+ input_device_params_, webrtc_audio_device.get());
EXPECT_FALSE(audio_processor->has_audio_processing());
// Set |audio_processor| to NULL to make sure |webrtc_audio_device| outlives
@@ -254,7 +265,7 @@ TEST_F(MediaStreamAudioProcessorTest, TurnOffDefaultConstraints) {
new WebRtcAudioDeviceImpl());
scoped_refptr<MediaStreamAudioProcessor> audio_processor(
new rtc::RefCountedObject<MediaStreamAudioProcessor>(
- constraint_factory.CreateWebMediaConstraints(), 0,
+ constraint_factory.CreateWebMediaConstraints(), input_device_params_,
webrtc_audio_device.get()));
EXPECT_FALSE(audio_processor->has_audio_processing());
audio_processor->OnCaptureFormatChanged(params_);
@@ -367,6 +378,67 @@ TEST_F(MediaStreamAudioProcessorTest, ValidateConstraints) {
EXPECT_FALSE(audio_constraints.IsValid());
}
+MediaAudioConstraints MakeMediaAudioConstraints(
+ const MockMediaConstraintFactory& constraint_factory) {
+ return MediaAudioConstraints(constraint_factory.CreateWebMediaConstraints(),
+ AudioParameters::NO_EFFECTS);
+}
+
+TEST_F(MediaStreamAudioProcessorTest, SelectsConstraintsArrayGeometryIfExists) {
+ std::vector<webrtc::Point> constraints_geometry(1,
+ webrtc::Point(-0.02, 0, 0));
+ constraints_geometry.push_back(webrtc::Point(0.02, 0, 0));
+
+ std::vector<webrtc::Point> input_device_geometry(1, webrtc::Point(0, 0, 0));
+ input_device_geometry.push_back(webrtc::Point(0, 0.05f, 0));
+
+ {
+ // Both geometries empty.
+ MockMediaConstraintFactory constraint_factory;
+ MediaStreamDevice::AudioDeviceParameters input_params;
+
+ const auto& actual_geometry = GetArrayGeometryPreferringConstraints(
+ MakeMediaAudioConstraints(constraint_factory), input_params);
+ EXPECT_EQ(std::vector<webrtc::Point>(), actual_geometry);
+ }
+ {
+ // Constraints geometry empty.
+ MockMediaConstraintFactory constraint_factory;
+ MediaStreamDevice::AudioDeviceParameters input_params;
+ input_params.mic_positions.push_back(media::Point(0, 0, 0));
+ input_params.mic_positions.push_back(media::Point(0, 0.05f, 0));
+
+ const auto& actual_geometry = GetArrayGeometryPreferringConstraints(
+ MakeMediaAudioConstraints(constraint_factory), input_params);
+ EXPECT_EQ(input_device_geometry, actual_geometry);
+ }
+ {
+ // Input device geometry empty.
+ MockMediaConstraintFactory constraint_factory;
+ constraint_factory.AddOptional(MediaAudioConstraints::kGoogArrayGeometry,
+ std::string("-0.02 0 0 0.02 0 0"));
+ MediaStreamDevice::AudioDeviceParameters input_params;
+
+ const auto& actual_geometry = GetArrayGeometryPreferringConstraints(
+ MakeMediaAudioConstraints(constraint_factory), input_params);
+ EXPECT_EQ(constraints_geometry, actual_geometry);
+ }
+ {
+ // Both geometries existing.
+ MockMediaConstraintFactory constraint_factory;
+ constraint_factory.AddOptional(MediaAudioConstraints::kGoogArrayGeometry,
+ std::string("-0.02 0 0 0.02 0 0"));
+ MediaStreamDevice::AudioDeviceParameters input_params;
+ input_params.mic_positions.push_back(media::Point(0, 0, 0));
+ input_params.mic_positions.push_back(media::Point(0, 0.05f, 0));
+
+ // Constraints geometry is preferred.
+ const auto& actual_geometry = GetArrayGeometryPreferringConstraints(
+ MakeMediaAudioConstraints(constraint_factory), input_params);
+ EXPECT_EQ(constraints_geometry, actual_geometry);
+ }
+}
+
// Test crashing with ASAN on Android. crbug.com/468762
#if defined(OS_ANDROID) && defined(ADDRESS_SANITIZER)
#define MAYBE_TestAllSampleRates DISABLED_TestAllSampleRates
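Review note: taken together, the four cases in SelectsConstraintsArrayGeometryIfExists pin down a simple preference rule for GetArrayGeometryPreferringConstraints: use the geometry given by the goog-array-geometry constraint when present, otherwise fall back to the device-reported mic_positions (converted from media::Point to webrtc::Point), and return an empty vector when neither exists. A standalone sketch of just that rule, assuming both geometries have already been parsed and converted (this is not the production helper, whose real implementation also handles the constraint parsing):

    // Hypothetical helper capturing the selection rule under test.
    std::vector<webrtc::Point> PreferConstraintsGeometry(
        const std::vector<webrtc::Point>& constraints_geometry,
        const std::vector<webrtc::Point>& device_geometry) {
      // A non-empty constraints geometry always wins; otherwise use whatever
      // the input device reported (possibly empty).
      return constraints_geometry.empty() ? device_geometry
                                          : constraints_geometry;
    }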
@@ -379,7 +451,7 @@ TEST_F(MediaStreamAudioProcessorTest, MAYBE_TestAllSampleRates) {
new WebRtcAudioDeviceImpl());
scoped_refptr<MediaStreamAudioProcessor> audio_processor(
new rtc::RefCountedObject<MediaStreamAudioProcessor>(
- constraint_factory.CreateWebMediaConstraints(), 0,
+ constraint_factory.CreateWebMediaConstraints(), input_device_params_,
webrtc_audio_device.get()));
EXPECT_TRUE(audio_processor->has_audio_processing());
@@ -420,7 +492,7 @@ TEST_F(MediaStreamAudioProcessorTest, GetAecDumpMessageFilter) {
new WebRtcAudioDeviceImpl());
scoped_refptr<MediaStreamAudioProcessor> audio_processor(
new rtc::RefCountedObject<MediaStreamAudioProcessor>(
- constraint_factory.CreateWebMediaConstraints(), 0,
+ constraint_factory.CreateWebMediaConstraints(), input_device_params_,
webrtc_audio_device.get()));
EXPECT_TRUE(audio_processor->aec_dump_message_filter_.get());
@@ -440,7 +512,7 @@ TEST_F(MediaStreamAudioProcessorTest, TestStereoAudio) {
new WebRtcAudioDeviceImpl());
scoped_refptr<MediaStreamAudioProcessor> audio_processor(
new rtc::RefCountedObject<MediaStreamAudioProcessor>(
- constraint_factory.CreateWebMediaConstraints(), 0,
+ constraint_factory.CreateWebMediaConstraints(), input_device_params_,
webrtc_audio_device.get()));
EXPECT_FALSE(audio_processor->has_audio_processing());
const media::AudioParameters source_params(
@@ -504,7 +576,7 @@ TEST_F(MediaStreamAudioProcessorTest, MAYBE_TestWithKeyboardMicChannel) {
new WebRtcAudioDeviceImpl());
scoped_refptr<MediaStreamAudioProcessor> audio_processor(
new rtc::RefCountedObject<MediaStreamAudioProcessor>(
- constraint_factory.CreateWebMediaConstraints(), 0,
+ constraint_factory.CreateWebMediaConstraints(), input_device_params_,
webrtc_audio_device.get()));
EXPECT_TRUE(audio_processor->has_audio_processing());
@@ -522,35 +594,4 @@ TEST_F(MediaStreamAudioProcessorTest, MAYBE_TestWithKeyboardMicChannel) {
audio_processor = NULL;
}
-using Point = webrtc::Point;
-using PointVector = std::vector<Point>;
-
-void ExpectPointVectorEqual(const PointVector& expected,
- const PointVector& actual) {
- EXPECT_EQ(expected.size(), actual.size());
- for (size_t i = 0; i < actual.size(); ++i) {
- EXPECT_EQ(expected[i].x(), actual[i].x());
- EXPECT_EQ(expected[i].y(), actual[i].y());
- EXPECT_EQ(expected[i].z(), actual[i].z());
- }
-}
-
-TEST(MediaStreamAudioProcessorOptionsTest, ParseArrayGeometry) {
- const PointVector expected_empty;
- ExpectPointVectorEqual(expected_empty, ParseArrayGeometry(""));
- ExpectPointVectorEqual(expected_empty, ParseArrayGeometry("0 0 a"));
- ExpectPointVectorEqual(expected_empty, ParseArrayGeometry("1 2"));
- ExpectPointVectorEqual(expected_empty, ParseArrayGeometry("1 2 3 4"));
-
- {
- PointVector expected(1, Point(-0.02f, 0, 0));
- expected.push_back(Point(0.02f, 0, 0));
- ExpectPointVectorEqual(expected, ParseArrayGeometry("-0.02 0 0 0.02 0 0"));
- }
- {
- PointVector expected(1, Point(1, 2, 3));
- ExpectPointVectorEqual(expected, ParseArrayGeometry("1 2 3"));
- }
-}
-
} // namespace content