| OLD | NEW |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "base/android/build_info.h" | 5 #include "base/android/build_info.h" |
| 6 #include "base/basictypes.h" | 6 #include "base/basictypes.h" |
| 7 #include "base/bind.h" | 7 #include "base/bind.h" |
| 8 #include "base/files/file_util.h" | 8 #include "base/files/file_util.h" |
| 9 #include "base/memory/scoped_ptr.h" | 9 #include "base/memory/scoped_ptr.h" |
| 10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
| (...skipping 154 matching lines...) |
| 165 : event_(event), pos_(0) { | 165 : event_(event), pos_(0) { |
| 166 // Reads a test file from media/test/data directory and stores it in | 166 // Reads a test file from media/test/data directory and stores it in |
| 167 // a DecoderBuffer. | 167 // a DecoderBuffer. |
| 168 file_ = ReadTestDataFile(name); | 168 file_ = ReadTestDataFile(name); |
| 169 | 169 |
| 170 // Log the name of the file which is used as input for this test. | 170 // Log the name of the file which is used as input for this test. |
| 171 base::FilePath file_path = GetTestDataFilePath(name); | 171 base::FilePath file_path = GetTestDataFilePath(name); |
| 172 DVLOG(0) << "Reading from file: " << file_path.value().c_str(); | 172 DVLOG(0) << "Reading from file: " << file_path.value().c_str(); |
| 173 } | 173 } |
| 174 | 174 |
| 175 virtual ~FileAudioSource() {} | 175 ~FileAudioSource() override {} |
| 176 | 176 |
| 177 // AudioOutputStream::AudioSourceCallback implementation. | 177 // AudioOutputStream::AudioSourceCallback implementation. |
| 178 | 178 |
| 179 // Use samples read from a data file and fill up the audio buffer | 179 // Use samples read from a data file and fill up the audio buffer |
| 180 // provided to us in the callback. | 180 // provided to us in the callback. |
| 181 virtual int OnMoreData(AudioBus* audio_bus, | 181 int OnMoreData(AudioBus* audio_bus, uint32 total_bytes_delay) override { |
| 182 uint32 total_bytes_delay) override { | |
| 183 bool stop_playing = false; | 182 bool stop_playing = false; |
| 184 int max_size = | 183 int max_size = |
| 185 audio_bus->frames() * audio_bus->channels() * kBytesPerSample; | 184 audio_bus->frames() * audio_bus->channels() * kBytesPerSample; |
| 186 | 185 |
| 187 // Adjust data size and prepare for end signal if file has ended. | 186 // Adjust data size and prepare for end signal if file has ended. |
| 188 if (pos_ + max_size > file_size()) { | 187 if (pos_ + max_size > file_size()) { |
| 189 stop_playing = true; | 188 stop_playing = true; |
| 190 max_size = file_size() - pos_; | 189 max_size = file_size() - pos_; |
| 191 } | 190 } |
| 192 | 191 |
| 193 // File data is stored as interleaved 16-bit values. Copy data samples from | 192 // File data is stored as interleaved 16-bit values. Copy data samples from |
| 194 // the file and deinterleave to match the audio bus format. | 193 // the file and deinterleave to match the audio bus format. |
| 195 // FromInterleaved() will zero out any unfilled frames when there is not | 194 // FromInterleaved() will zero out any unfilled frames when there is not |
| 196 // sufficient data remaining in the file to fill up the complete frame. | 195 // sufficient data remaining in the file to fill up the complete frame. |
| 197 int frames = max_size / (audio_bus->channels() * kBytesPerSample); | 196 int frames = max_size / (audio_bus->channels() * kBytesPerSample); |
| 198 if (max_size) { | 197 if (max_size) { |
| 199 audio_bus->FromInterleaved(file_->data() + pos_, frames, kBytesPerSample); | 198 audio_bus->FromInterleaved(file_->data() + pos_, frames, kBytesPerSample); |
| 200 pos_ += max_size; | 199 pos_ += max_size; |
| 201 } | 200 } |
| 202 | 201 |
| 203 // Set event to ensure that the test can stop when the file has ended. | 202 // Set event to ensure that the test can stop when the file has ended. |
| 204 if (stop_playing) | 203 if (stop_playing) |
| 205 event_->Signal(); | 204 event_->Signal(); |
| 206 | 205 |
| 207 return frames; | 206 return frames; |
| 208 } | 207 } |
| 209 | 208 |
| 210 virtual void OnError(AudioOutputStream* stream) override {} | 209 void OnError(AudioOutputStream* stream) override {} |
| 211 | 210 |
| 212 int file_size() { return file_->data_size(); } | 211 int file_size() { return file_->data_size(); } |
| 213 | 212 |
| 214 private: | 213 private: |
| 215 base::WaitableEvent* event_; | 214 base::WaitableEvent* event_; |
| 216 int pos_; | 215 int pos_; |
| 217 scoped_refptr<DecoderBuffer> file_; | 216 scoped_refptr<DecoderBuffer> file_; |
| 218 | 217 |
| 219 DISALLOW_COPY_AND_ASSIGN(FileAudioSource); | 218 DISALLOW_COPY_AND_ASSIGN(FileAudioSource); |
| 220 }; | 219 }; |
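Aside on the `OnMoreData()` math above: the source clamps the copy size to whatever is left in the file, converts that byte count back into whole frames, and relies on `FromInterleaved()` to zero any frames it could not fill. A standalone sketch with illustrative numbers (not taken from the test) makes the end-of-file case concrete:

```cpp
// Standalone sketch of the frame-count math in FileAudioSource::OnMoreData().
// The geometry below (440 frames, stereo, 16-bit, 1000 bytes left in the
// file) is illustrative only and not taken from the test.
#include <cstdio>

int main() {
  const int kBytesPerSample = 2;     // 16-bit PCM, as in the test data.
  const int channels = 2;
  const int bus_frames = 440;        // Frames requested by the AudioBus.
  const int file_bytes_left = 1000;  // Hypothetical tail of the input file.

  int max_size = bus_frames * channels * kBytesPerSample;  // 1760 bytes.
  if (max_size > file_bytes_left)
    max_size = file_bytes_left;                            // Clamped to 1000.
  const int frames = max_size / (channels * kBytesPerSample);  // 250 frames.

  // FromInterleaved() would copy these 250 frames and zero the remaining
  // 190 before the WaitableEvent is signaled to end the test.
  printf("copied %d of %d requested frames\n", frames, bus_frames);
  return 0;
}
```

Returning the clamped frame count rather than the full bus size is what lets the final, partial buffer play out cleanly before the test stops.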
| (...skipping 15 matching lines...) |
| 236 | 235 |
| 237 // Open up the binary file which will be written to in the destructor. | 236 // Open up the binary file which will be written to in the destructor. |
| 238 base::FilePath file_path; | 237 base::FilePath file_path; |
| 239 EXPECT_TRUE(PathService::Get(base::DIR_SOURCE_ROOT, &file_path)); | 238 EXPECT_TRUE(PathService::Get(base::DIR_SOURCE_ROOT, &file_path)); |
| 240 file_path = file_path.AppendASCII(file_name.c_str()); | 239 file_path = file_path.AppendASCII(file_name.c_str()); |
| 241 binary_file_ = base::OpenFile(file_path, "wb"); | 240 binary_file_ = base::OpenFile(file_path, "wb"); |
| 242 DLOG_IF(ERROR, !binary_file_) << "Failed to open binary PCM data file."; | 241 DLOG_IF(ERROR, !binary_file_) << "Failed to open binary PCM data file."; |
| 243 DVLOG(0) << "Writing to file: " << file_path.value().c_str(); | 242 DVLOG(0) << "Writing to file: " << file_path.value().c_str(); |
| 244 } | 243 } |
| 245 | 244 |
| 246 virtual ~FileAudioSink() { | 245 ~FileAudioSink() override { |
| 247 int bytes_written = 0; | 246 int bytes_written = 0; |
| 248 while (bytes_written < buffer_->forward_capacity()) { | 247 while (bytes_written < buffer_->forward_capacity()) { |
| 249 const uint8* chunk; | 248 const uint8* chunk; |
| 250 int chunk_size; | 249 int chunk_size; |
| 251 | 250 |
| 252 // Stop writing if no more data is available. | 251 // Stop writing if no more data is available. |
| 253 if (!buffer_->GetCurrentChunk(&chunk, &chunk_size)) | 252 if (!buffer_->GetCurrentChunk(&chunk, &chunk_size)) |
| 254 break; | 253 break; |
| 255 | 254 |
| 256 // Write recorded data chunk to the file and prepare for next chunk. | 255 // Write recorded data chunk to the file and prepare for next chunk. |
| 257 // TODO(henrika): use file_util:: instead. | 256 // TODO(henrika): use file_util:: instead. |
| 258 fwrite(chunk, 1, chunk_size, binary_file_); | 257 fwrite(chunk, 1, chunk_size, binary_file_); |
| 259 buffer_->Seek(chunk_size); | 258 buffer_->Seek(chunk_size); |
| 260 bytes_written += chunk_size; | 259 bytes_written += chunk_size; |
| 261 } | 260 } |
| 262 base::CloseFile(binary_file_); | 261 base::CloseFile(binary_file_); |
| 263 } | 262 } |
| 264 | 263 |
| 265 // AudioInputStream::AudioInputCallback implementation. | 264 // AudioInputStream::AudioInputCallback implementation. |
| 266 virtual void OnData(AudioInputStream* stream, | 265 void OnData(AudioInputStream* stream, |
| 267 const AudioBus* src, | 266 const AudioBus* src, |
| 268 uint32 hardware_delay_bytes, | 267 uint32 hardware_delay_bytes, |
| 269 double volume) override { | 268 double volume) override { |
| 270 const int num_samples = src->frames() * src->channels(); | 269 const int num_samples = src->frames() * src->channels(); |
| 271 scoped_ptr<int16> interleaved(new int16[num_samples]); | 270 scoped_ptr<int16> interleaved(new int16[num_samples]); |
| 272 const int bytes_per_sample = sizeof(*interleaved); | 271 const int bytes_per_sample = sizeof(*interleaved); |
| 273 src->ToInterleaved(src->frames(), bytes_per_sample, interleaved.get()); | 272 src->ToInterleaved(src->frames(), bytes_per_sample, interleaved.get()); |
| 274 | 273 |
| 275 // Store the data in a temporary buffer to avoid making blocking | 274 // Store the data in a temporary buffer to avoid making blocking |
| 276 // fwrite() calls in the audio callback. The complete buffer will be | 275 // fwrite() calls in the audio callback. The complete buffer will be |
| 277 // written to file in the destructor. | 276 // written to file in the destructor. |
| 278 const int size = bytes_per_sample * num_samples; | 277 const int size = bytes_per_sample * num_samples; |
| 279 if (!buffer_->Append((const uint8*)interleaved.get(), size)) | 278 if (!buffer_->Append((const uint8*)interleaved.get(), size)) |
| 280 event_->Signal(); | 279 event_->Signal(); |
| 281 } | 280 } |
| 282 | 281 |
| 283 virtual void OnError(AudioInputStream* stream) override {} | 282 void OnError(AudioInputStream* stream) override {} |
| 284 | 283 |
| 285 private: | 284 private: |
| 286 base::WaitableEvent* event_; | 285 base::WaitableEvent* event_; |
| 287 AudioParameters params_; | 286 AudioParameters params_; |
| 288 scoped_ptr<media::SeekableBuffer> buffer_; | 287 scoped_ptr<media::SeekableBuffer> buffer_; |
| 289 FILE* binary_file_; | 288 FILE* binary_file_; |
| 290 | 289 |
| 291 DISALLOW_COPY_AND_ASSIGN(FileAudioSink); | 290 DISALLOW_COPY_AND_ASSIGN(FileAudioSink); |
| 292 }; | 291 }; |
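`FileAudioSink` only becomes interesting once it is attached to a capture stream; that wiring lives in test code that is collapsed in this diff. The sketch below shows the presumed shape of it. The `MakeAudioInputStream()` signature, the `kDefaultDeviceId` constant and the `FileAudioSink` constructor arguments are assumptions based on the Chromium media API of this vintage, so treat it as illustrative rather than a copy of the test:

```cpp
// Hedged sketch: recording roughly ten seconds of microphone input through
// FileAudioSink. API details (MakeAudioInputStream(), kDefaultDeviceId) are
// assumed from this era of Chromium's media layer and may differ elsewhere.
void RecordToFileForTest(AudioManager* audio_manager,
                         const AudioParameters& params) {
  base::WaitableEvent event(false, false);  // Auto-reset, initially unsignaled.
  FileAudioSink sink(&event, params, "recorded_audio.pcm");

  AudioInputStream* stream = audio_manager->MakeAudioInputStream(
      params, AudioManagerBase::kDefaultDeviceId);
  ASSERT_TRUE(stream);
  ASSERT_TRUE(stream->Open());

  stream->Start(&sink);
  // Stop either when the sink's SeekableBuffer is full (OnData signals the
  // event) or after a fixed timeout.
  event.TimedWait(base::TimeDelta::FromSeconds(10));
  stream->Stop();
  stream->Close();  // Close() also destroys the stream object.
}
```

The actual test additionally bounces these calls onto the audio thread via `RunOnAudioThread()`, which the sketch omits for brevity.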
| 293 | 292 |
| 294 // Implements AudioInputCallback and AudioSourceCallback to support full | 293 // Implements AudioInputCallback and AudioSourceCallback to support full |
| 295 // duplex audio where captured samples are played out in loopback after | 294 // duplex audio where captured samples are played out in loopback after |
| 296 // reading from a temporary FIFO storage. | 295 // reading from a temporary FIFO storage. |
| 297 class FullDuplexAudioSinkSource | 296 class FullDuplexAudioSinkSource |
| 298 : public AudioInputStream::AudioInputCallback, | 297 : public AudioInputStream::AudioInputCallback, |
| 299 public AudioOutputStream::AudioSourceCallback { | 298 public AudioOutputStream::AudioSourceCallback { |
| 300 public: | 299 public: |
| 301 explicit FullDuplexAudioSinkSource(const AudioParameters& params) | 300 explicit FullDuplexAudioSinkSource(const AudioParameters& params) |
| 302 : params_(params), | 301 : params_(params), |
| 303 previous_time_(base::TimeTicks::Now()), | 302 previous_time_(base::TimeTicks::Now()), |
| 304 started_(false) { | 303 started_(false) { |
| 305 // Start with a reasonably small FIFO size. It will be increased | 304 // Start with a reasonably small FIFO size. It will be increased |
| 306 // dynamically during the test if required. | 305 // dynamically during the test if required. |
| 307 fifo_.reset(new media::SeekableBuffer(0, 2 * params.GetBytesPerBuffer())); | 306 fifo_.reset(new media::SeekableBuffer(0, 2 * params.GetBytesPerBuffer())); |
| 308 buffer_.reset(new uint8[params_.GetBytesPerBuffer()]); | 307 buffer_.reset(new uint8[params_.GetBytesPerBuffer()]); |
| 309 } | 308 } |
| 310 | 309 |
| 311 virtual ~FullDuplexAudioSinkSource() {} | 310 ~FullDuplexAudioSinkSource() override {} |
| 312 | 311 |
| 313 // AudioInputStream::AudioInputCallback implementation | 312 // AudioInputStream::AudioInputCallback implementation |
| 314 virtual void OnData(AudioInputStream* stream, | 313 void OnData(AudioInputStream* stream, |
| 315 const AudioBus* src, | 314 const AudioBus* src, |
| 316 uint32 hardware_delay_bytes, | 315 uint32 hardware_delay_bytes, |
| 317 double volume) override { | 316 double volume) override { |
| 318 const base::TimeTicks now_time = base::TimeTicks::Now(); | 317 const base::TimeTicks now_time = base::TimeTicks::Now(); |
| 319 const int diff = (now_time - previous_time_).InMilliseconds(); | 318 const int diff = (now_time - previous_time_).InMilliseconds(); |
| 320 | 319 |
| 321 EXPECT_EQ(params_.bits_per_sample(), 16); | 320 EXPECT_EQ(params_.bits_per_sample(), 16); |
| 322 const int num_samples = src->frames() * src->channels(); | 321 const int num_samples = src->frames() * src->channels(); |
| 323 scoped_ptr<int16> interleaved(new int16[num_samples]); | 322 scoped_ptr<int16> interleaved(new int16[num_samples]); |
| 324 const int bytes_per_sample = sizeof(*interleaved); | 323 const int bytes_per_sample = sizeof(*interleaved); |
| 325 src->ToInterleaved(src->frames(), bytes_per_sample, interleaved.get()); | 324 src->ToInterleaved(src->frames(), bytes_per_sample, interleaved.get()); |
| 326 const int size = bytes_per_sample * num_samples; | 325 const int size = bytes_per_sample * num_samples; |
| 327 | 326 |
| (...skipping 16 matching lines...) |
| 344 return; | 343 return; |
| 345 | 344 |
| 346 // Append new data to the FIFO and extend the size if the max capacity | 345 // Append new data to the FIFO and extend the size if the max capacity |
| 347 // was exceeded. Flush the FIFO when extended just in case. | 346 // was exceeded. Flush the FIFO when extended just in case. |
| 348 if (!fifo_->Append((const uint8*)interleaved.get(), size)) { | 347 if (!fifo_->Append((const uint8*)interleaved.get(), size)) { |
| 349 fifo_->set_forward_capacity(2 * fifo_->forward_capacity()); | 348 fifo_->set_forward_capacity(2 * fifo_->forward_capacity()); |
| 350 fifo_->Clear(); | 349 fifo_->Clear(); |
| 351 } | 350 } |
| 352 } | 351 } |
| 353 | 352 |
| 354 virtual void OnError(AudioInputStream* stream) override {} | 353 void OnError(AudioInputStream* stream) override {} |
| 355 | 354 |
| 356 // AudioOutputStream::AudioSourceCallback implementation | 355 // AudioOutputStream::AudioSourceCallback implementation |
| 357 virtual int OnMoreData(AudioBus* dest, | 356 int OnMoreData(AudioBus* dest, uint32 total_bytes_delay) override { |
| 358 uint32 total_bytes_delay) override { | |
| 359 const int size_in_bytes = | 357 const int size_in_bytes = |
| 360 (params_.bits_per_sample() / 8) * dest->frames() * dest->channels(); | 358 (params_.bits_per_sample() / 8) * dest->frames() * dest->channels(); |
| 361 EXPECT_EQ(size_in_bytes, params_.GetBytesPerBuffer()); | 359 EXPECT_EQ(size_in_bytes, params_.GetBytesPerBuffer()); |
| 362 | 360 |
| 363 base::AutoLock lock(lock_); | 361 base::AutoLock lock(lock_); |
| 364 | 362 |
| 365 // We add an initial delay of ~1 second before loopback starts to ensure | 363 // We add an initial delay of ~1 second before loopback starts to ensure |
| 366 // a stable callback sequence and to avoid initial bursts which might add | 364 // a stable callback sequence and to avoid initial bursts which might add |
| 367 // to the extra FIFO delay. | 365 // to the extra FIFO delay. |
| 368 if (!started_) { | 366 if (!started_) { |
| 369 dest->Zero(); | 367 dest->Zero(); |
| 370 return dest->frames(); | 368 return dest->frames(); |
| 371 } | 369 } |
| 372 | 370 |
| 373 // Fill up destination with zeros if the FIFO does not contain enough | 371 // Fill up destination with zeros if the FIFO does not contain enough |
| 374 // data to fulfill the request. | 372 // data to fulfill the request. |
| 375 if (fifo_->forward_bytes() < size_in_bytes) { | 373 if (fifo_->forward_bytes() < size_in_bytes) { |
| 376 dest->Zero(); | 374 dest->Zero(); |
| 377 } else { | 375 } else { |
| 378 fifo_->Read(buffer_.get(), size_in_bytes); | 376 fifo_->Read(buffer_.get(), size_in_bytes); |
| 379 dest->FromInterleaved( | 377 dest->FromInterleaved( |
| 380 buffer_.get(), dest->frames(), params_.bits_per_sample() / 8); | 378 buffer_.get(), dest->frames(), params_.bits_per_sample() / 8); |
| 381 } | 379 } |
| 382 | 380 |
| 383 return dest->frames(); | 381 return dest->frames(); |
| 384 } | 382 } |
| 385 | 383 |
| 386 virtual void OnError(AudioOutputStream* stream) override {} | 384 void OnError(AudioOutputStream* stream) override {} |
| 387 | 385 |
| 388 private: | 386 private: |
| 389 // Converts from bytes to milliseconds given number of bytes and existing | 387 // Converts from bytes to milliseconds given number of bytes and existing |
| 390 // audio parameters. | 388 // audio parameters. |
| 391 double BytesToMilliseconds(int bytes) const { | 389 double BytesToMilliseconds(int bytes) const { |
| 392 const int frames = bytes / params_.GetBytesPerFrame(); | 390 const int frames = bytes / params_.GetBytesPerFrame(); |
| 393 return (base::TimeDelta::FromMicroseconds( | 391 return (base::TimeDelta::FromMicroseconds( |
| 394 frames * base::Time::kMicrosecondsPerSecond / | 392 frames * base::Time::kMicrosecondsPerSecond / |
| 395 static_cast<double>(params_.sample_rate()))).InMillisecondsF(); | 393 static_cast<double>(params_.sample_rate()))).InMillisecondsF(); |
| 396 } | 394 } |
| (...skipping 10 matching lines...) |
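The `BytesToMilliseconds()` helper above turns a byte count into a wall-clock duration by going through frames and the sample rate. A quick worked example with illustrative parameters, not values taken from the test configuration:

```cpp
// Worked example of the BytesToMilliseconds() conversion using illustrative
// parameters (48 kHz, stereo, 16-bit => 4 bytes per frame).
#include <cstdio>

int main() {
  const int sample_rate = 48000;
  const int bytes_per_frame = 2 /* channels */ * 2 /* bytes per sample */;
  const int bytes = 1920;                      // One 10 ms buffer.
  const int frames = bytes / bytes_per_frame;  // 480 frames.
  const double milliseconds = frames * 1000.0 / sample_rate;
  printf("%d bytes ~= %.1f ms of audio\n", bytes, milliseconds);  // ~10.0 ms
  return 0;
}
```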
| 407 | 405 |
| 408 // Test fixture class for tests which only exercise the output path. | 406 // Test fixture class for tests which only exercise the output path. |
| 409 class AudioAndroidOutputTest : public testing::Test { | 407 class AudioAndroidOutputTest : public testing::Test { |
| 410 public: | 408 public: |
| 411 AudioAndroidOutputTest() | 409 AudioAndroidOutputTest() |
| 412 : loop_(new base::MessageLoopForUI()), | 410 : loop_(new base::MessageLoopForUI()), |
| 413 audio_manager_(AudioManager::CreateForTesting()), | 411 audio_manager_(AudioManager::CreateForTesting()), |
| 414 audio_output_stream_(NULL) { | 412 audio_output_stream_(NULL) { |
| 415 } | 413 } |
| 416 | 414 |
| 417 virtual ~AudioAndroidOutputTest() { | 415 ~AudioAndroidOutputTest() override {} |
| 418 } | |
| 419 | 416 |
| 420 protected: | 417 protected: |
| 421 AudioManager* audio_manager() { return audio_manager_.get(); } | 418 AudioManager* audio_manager() { return audio_manager_.get(); } |
| 422 base::MessageLoopForUI* loop() { return loop_.get(); } | 419 base::MessageLoopForUI* loop() { return loop_.get(); } |
| 423 const AudioParameters& audio_output_parameters() { | 420 const AudioParameters& audio_output_parameters() { |
| 424 return audio_output_parameters_; | 421 return audio_output_parameters_; |
| 425 } | 422 } |
| 426 | 423 |
| 427 // Synchronously runs the provided callback/closure on the audio thread. | 424 // Synchronously runs the provided callback/closure on the audio thread. |
| 428 void RunOnAudioThread(const base::Closure& closure) { | 425 void RunOnAudioThread(const base::Closure& closure) { |
| (...skipping 539 matching lines...) |
| 968 base::PlatformThread::Sleep(base::TimeDelta::FromSeconds(20)); | 965 base::PlatformThread::Sleep(base::TimeDelta::FromSeconds(20)); |
| 969 printf("\n"); | 966 printf("\n"); |
| 970 StopAndCloseAudioOutputStreamOnAudioThread(); | 967 StopAndCloseAudioOutputStreamOnAudioThread(); |
| 971 StopAndCloseAudioInputStreamOnAudioThread(); | 968 StopAndCloseAudioInputStreamOnAudioThread(); |
| 972 } | 969 } |
| 973 | 970 |
| 974 INSTANTIATE_TEST_CASE_P(AudioAndroidInputTest, AudioAndroidInputTest, | 971 INSTANTIATE_TEST_CASE_P(AudioAndroidInputTest, AudioAndroidInputTest, |
| 975 testing::ValuesIn(RunAudioRecordInputPathTests())); | 972 testing::ValuesIn(RunAudioRecordInputPathTests())); |
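For readers less familiar with value-parameterized gtest: `INSTANTIATE_TEST_CASE_P` above re-runs every `TEST_P` defined on `AudioAndroidInputTest` once for each value in the vector returned by `RunAudioRecordInputPathTests()`, with `GetParam()` delivering the current value inside the test body. A generic, self-contained illustration of the same pattern (fixture name, test name and parameter values made up for this note) looks like this:

```cpp
// Generic value-parameterized gtest example mirroring the pattern above.
// The fixture, test name and parameter values are invented for illustration.
#include "testing/gtest/include/gtest/gtest.h"

class BufferSizeTest : public testing::TestWithParam<int> {};

TEST_P(BufferSizeTest, IsPowerOfTwo) {
  // GetParam() yields one of the values listed in INSTANTIATE_TEST_CASE_P.
  const int size = GetParam();
  EXPECT_EQ(0, size & (size - 1));
}

// Runs BufferSizeTest.IsPowerOfTwo three times, once per buffer size.
INSTANTIATE_TEST_CASE_P(CommonBufferSizes,
                        BufferSizeTest,
                        testing::Values(256, 512, 1024));
```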
| 976 | 973 |
| 977 } // namespace media | 974 } // namespace media |