| Index: chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
|
| diff --git a/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc b/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
|
| index 84004a87363f4e98677a2e428dd06da6e1509fab..3ec3bf7872b99648d4a9efb06009017d8c988017 100644
|
| --- a/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
|
| +++ b/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
|
| @@ -20,18 +20,15 @@
|
| #include "base/threading/thread.h"
|
| #include "base/time/time.h"
|
| #include "chromecast/base/task_runner_impl.h"
|
| +#include "chromecast/media/cma/base/cast_decoder_buffer_impl.h"
|
| #include "chromecast/media/cma/base/decoder_buffer_adapter.h"
|
| #include "chromecast/media/cma/base/decoder_config_adapter.h"
|
| #include "chromecast/media/cma/test/frame_segmenter_for_test.h"
|
| -#include "chromecast/media/cma/test/media_component_device_feeder_for_test.h"
|
| #include "chromecast/public/cast_media_shlib.h"
|
| -#include "chromecast/public/media/audio_pipeline_device.h"
|
| #include "chromecast/public/media/cast_decoder_buffer.h"
|
| #include "chromecast/public/media/decoder_config.h"
|
| -#include "chromecast/public/media/media_clock_device.h"
|
| #include "chromecast/public/media/media_pipeline_backend.h"
|
| #include "chromecast/public/media/media_pipeline_device_params.h"
|
| -#include "chromecast/public/media/video_pipeline_device.h"
|
| #include "media/base/audio_decoder_config.h"
|
| #include "media/base/decoder_buffer.h"
|
| #include "media/base/video_decoder_config.h"
|
| @@ -42,9 +39,6 @@ namespace media {
|
|
|
| namespace {
|
|
|
| -typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator
|
| - ComponentDeviceIterator;
|
| -
|
| const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20);
|
|
|
| base::FilePath GetTestDataFilePath(const std::string& name) {
|
| @@ -59,7 +53,8 @@ base::FilePath GetTestDataFilePath(const std::string& name) {
|
|
|
| } // namespace
|
|
|
| -class AudioVideoPipelineDeviceTest : public testing::Test {
|
| +class AudioVideoPipelineDeviceTest : public testing::Test,
|
| + public MediaPipelineBackend::Delegate {
|
| public:
|
| struct PauseInfo {
|
| PauseInfo() {}
|
| @@ -96,25 +91,30 @@ class AudioVideoPipelineDeviceTest : public testing::Test {
|
|
|
| void Start();
|
|
|
| + // MediaPipelineBackend::Delegate implementation:
|
| + void OnVideoResolutionChanged(MediaPipelineBackend::Decoder* decoder,
|
| + const Size& size) override {}
|
| + void OnPushBufferComplete(MediaPipelineBackend::Decoder* decoder,
|
| + MediaPipelineBackend::BufferStatus status) override;
|
| + void OnEndOfStream(MediaPipelineBackend::Decoder* decoder) override;
|
| +
|
| private:
|
| void Initialize();
|
|
|
| void LoadAudioStream(const std::string& filename);
|
| void LoadVideoStream(const std::string& filename, bool raw_h264);
|
|
|
| + void FeedAudioBuffer();
|
| + void FeedVideoBuffer();
|
| +
|
| void MonitorLoop();
|
|
|
| void OnPauseCompleted();
|
|
|
| - void OnEos(MediaComponentDeviceFeederForTest* device_feeder);
|
| -
|
| scoped_ptr<TaskRunnerImpl> task_runner_;
|
| scoped_ptr<MediaPipelineBackend> backend_;
|
| - MediaClockDevice* media_clock_device_;
|
| -
|
| - // Devices to feed
|
| - ScopedVector<MediaComponentDeviceFeederForTest>
|
| - component_device_feeders_;
|
| + scoped_ptr<CastDecoderBufferImpl> backend_audio_buffer_;
|
| + scoped_ptr<CastDecoderBufferImpl> backend_video_buffer_;
|
|
|
| // Current media time.
|
| base::TimeDelta pause_time_;
|
| @@ -123,11 +123,23 @@ class AudioVideoPipelineDeviceTest : public testing::Test {
|
| std::vector<PauseInfo> pause_pattern_;
|
| int pause_pattern_idx_;
|
|
|
| + BufferList audio_buffers_;
|
| + BufferList video_buffers_;
|
| +
|
| + MediaPipelineBackend::AudioDecoder* audio_decoder_;
|
| + MediaPipelineBackend::VideoDecoder* video_decoder_;
|
| + bool audio_feeding_completed_;
|
| + bool video_feeding_completed_;
|
| +
|
| DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest);
|
| };
|
|
|
| AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest()
|
| - : pause_pattern_() {
|
| + : pause_pattern_(),
|
| + audio_decoder_(nullptr),
|
| + video_decoder_(nullptr),
|
| + audio_feeding_completed_(true),
|
| + video_feeding_completed_(true) {
|
| }
|
|
|
| AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() {
|
| @@ -147,6 +159,8 @@ void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly(
|
| const std::string& filename) {
|
| Initialize();
|
| LoadAudioStream(filename);
|
| + bool success = backend_->Initialize(this);
|
| + ASSERT_TRUE(success);
|
| }
|
|
|
| void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(
|
| @@ -154,6 +168,8 @@ void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(
|
| bool raw_h264) {
|
| Initialize();
|
| LoadVideoStream(filename, raw_h264);
|
| + bool success = backend_->Initialize(this);
|
| + ASSERT_TRUE(success);
|
| }
|
|
|
| void AudioVideoPipelineDeviceTest::ConfigureForFile(
|
| @@ -161,38 +177,32 @@ void AudioVideoPipelineDeviceTest::ConfigureForFile(
|
| Initialize();
|
| LoadVideoStream(filename, false /* raw_h264 */);
|
| LoadAudioStream(filename);
|
| + bool success = backend_->Initialize(this);
|
| + ASSERT_TRUE(success);
|
| }
|
|
|
| void AudioVideoPipelineDeviceTest::LoadAudioStream(
|
| const std::string& filename) {
|
| base::FilePath file_path = GetTestDataFilePath(filename);
|
| DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */);
|
| - BufferList frames = demux_result.frames;
|
|
|
| - AudioPipelineDevice* audio_pipeline_device = backend_->GetAudio();
|
| + audio_buffers_ = demux_result.frames;
|
| + audio_decoder_ = backend_->CreateAudioDecoder();
|
| + audio_feeding_completed_ = false;
|
|
|
| - bool success = audio_pipeline_device->SetConfig(
|
| - DecoderConfigAdapter::ToCastAudioConfig(kPrimary,
|
| - demux_result.audio_config));
|
| + bool success =
|
| + audio_decoder_->SetConfig(DecoderConfigAdapter::ToCastAudioConfig(
|
| + kPrimary, demux_result.audio_config));
|
| ASSERT_TRUE(success);
|
|
|
| - VLOG(2) << "Got " << frames.size() << " audio input frames";
|
| -
|
| - frames.push_back(
|
| - scoped_refptr<DecoderBufferBase>(
|
| - new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));
|
| + VLOG(2) << "Got " << audio_buffers_.size() << " audio input frames";
|
|
|
| - MediaComponentDeviceFeederForTest* device_feeder =
|
| - new MediaComponentDeviceFeederForTest(audio_pipeline_device, frames);
|
| - device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
|
| - base::Unretained(this),
|
| - device_feeder));
|
| - component_device_feeders_.push_back(device_feeder);
|
| + audio_buffers_.push_back(scoped_refptr<DecoderBufferBase>(
|
| + new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));
|
| }
|
|
|
| void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename,
|
| bool raw_h264) {
|
| - BufferList frames;
|
| VideoConfig video_config;
|
|
|
| if (raw_h264) {
|
| @@ -200,7 +210,8 @@ void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename,
|
| base::MemoryMappedFile video_stream;
|
| ASSERT_TRUE(video_stream.Initialize(file_path))
|
| << "Couldn't open stream file: " << file_path.MaybeAsASCII();
|
| - frames = H264SegmenterForTest(video_stream.data(), video_stream.length());
|
| + video_buffers_ =
|
| + H264SegmenterForTest(video_stream.data(), video_stream.length());
|
|
|
| // TODO(erickung): Either pull data from stream or make caller specify value
|
| video_config.codec = kCodecH264;
|
| @@ -211,59 +222,162 @@ void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename,
|
| base::FilePath file_path = GetTestDataFilePath(filename);
|
| DemuxResult demux_result = FFmpegDemuxForTest(file_path,
|
| /*audio*/ false);
|
| - frames = demux_result.frames;
|
| + video_buffers_ = demux_result.frames;
|
| video_config = DecoderConfigAdapter::ToCastVideoConfig(
|
| kPrimary, demux_result.video_config);
|
| }
|
|
|
| - VideoPipelineDevice* video_pipeline_device = backend_->GetVideo();
|
| -
|
| - // Set configuration.
|
| - bool success = video_pipeline_device->SetConfig(video_config);
|
| + video_decoder_ = backend_->CreateVideoDecoder();
|
| + video_feeding_completed_ = false;
|
| + bool success = video_decoder_->SetConfig(video_config);
|
| ASSERT_TRUE(success);
|
|
|
| - VLOG(2) << "Got " << frames.size() << " video input frames";
|
| + VLOG(2) << "Got " << video_buffers_.size() << " video input frames";
|
| +
|
| + video_buffers_.push_back(scoped_refptr<DecoderBufferBase>(
|
| + new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));
|
| +}
|
| +
|
| +void AudioVideoPipelineDeviceTest::FeedAudioBuffer() {
|
| + // Possibly feed one frame
|
| + DCHECK(!audio_buffers_.empty());
|
| + if (audio_feeding_completed_)
|
| + return;
|
| +
|
| + scoped_refptr<DecoderBufferBase> buffer = audio_buffers_.front();
|
| + if (backend_audio_buffer_)
|
| + backend_audio_buffer_->set_buffer(buffer);
|
| + else
|
| + backend_audio_buffer_.reset(new CastDecoderBufferImpl(buffer));
|
| +
|
| + MediaPipelineBackend::BufferStatus status =
|
| + audio_decoder_->PushBuffer(nullptr, // decrypt_context
|
| + backend_audio_buffer_.get());
|
| + EXPECT_NE(status, MediaPipelineBackend::kBufferFailed);
|
| + audio_buffers_.pop_front();
|
| +
|
| + // Feeding is done, just wait for the end of stream callback.
|
| + if (buffer->end_of_stream() || audio_buffers_.empty()) {
|
| + if (audio_buffers_.empty() && !buffer->end_of_stream()) {
|
| + LOG(WARNING) << "Stream emptied without feeding EOS frame";
|
| + }
|
| +
|
| + audio_feeding_completed_ = true;
|
| + return;
|
| + }
|
| +
|
| + if (status == MediaPipelineBackend::kBufferPending)
|
| + return;
|
| +
|
| + OnPushBufferComplete(audio_decoder_, MediaPipelineBackend::kBufferSuccess);
|
| +}
|
| +
|
| +void AudioVideoPipelineDeviceTest::FeedVideoBuffer() {
|
| + // Possibly feed one frame
|
| + DCHECK(!video_buffers_.empty());
|
| + if (video_feeding_completed_)
|
| + return;
|
| +
|
| + scoped_refptr<DecoderBufferBase> buffer = video_buffers_.front();
|
| + if (backend_video_buffer_)
|
| + backend_video_buffer_->set_buffer(buffer);
|
| + else
|
| + backend_video_buffer_.reset(new CastDecoderBufferImpl(buffer));
|
| +
|
| + MediaPipelineBackend::BufferStatus status =
|
| + video_decoder_->PushBuffer(nullptr, // decrypt_context
|
| + backend_video_buffer_.get());
|
| + EXPECT_NE(status, MediaPipelineBackend::kBufferFailed);
|
| + video_buffers_.pop_front();
|
| +
|
| + // Feeding is done, just wait for the end of stream callback.
|
| + if (buffer->end_of_stream() || video_buffers_.empty()) {
|
| + if (video_buffers_.empty() && !buffer->end_of_stream()) {
|
| + LOG(WARNING) << "Stream emptied without feeding EOS frame";
|
| + }
|
| +
|
| + video_feeding_completed_ = true;
|
| + return;
|
| + }
|
|
|
| - frames.push_back(
|
| - scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter(
|
| - ::media::DecoderBuffer::CreateEOSBuffer())));
|
| + if (status == MediaPipelineBackend::kBufferPending)
|
| + return;
|
|
|
| - MediaComponentDeviceFeederForTest* device_feeder =
|
| - new MediaComponentDeviceFeederForTest(video_pipeline_device, frames);
|
| - device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
|
| - base::Unretained(this),
|
| - device_feeder));
|
| - component_device_feeders_.push_back(device_feeder);
|
| + OnPushBufferComplete(video_decoder_, MediaPipelineBackend::kBufferSuccess);
|
| }
|
|
|
| void AudioVideoPipelineDeviceTest::Start() {
|
| pause_time_ = base::TimeDelta();
|
| pause_pattern_idx_ = 0;
|
|
|
| - for (size_t i = 0; i < component_device_feeders_.size(); i++) {
|
| + if (audio_decoder_) {
|
| base::ThreadTaskRunnerHandle::Get()->PostTask(
|
| - FROM_HERE, base::Bind(&MediaComponentDeviceFeederForTest::Feed,
|
| - base::Unretained(component_device_feeders_[i])));
|
| + FROM_HERE,
|
| + base::Bind(&AudioVideoPipelineDeviceTest::FeedAudioBuffer,
|
| + base::Unretained(this)));
|
| + }
|
| + if (video_decoder_) {
|
| + base::ThreadTaskRunnerHandle::Get()->PostTask(
|
| + FROM_HERE,
|
| + base::Bind(&AudioVideoPipelineDeviceTest::FeedVideoBuffer,
|
| + base::Unretained(this)));
|
| }
|
|
|
| - media_clock_device_->SetState(MediaClockDevice::kStateRunning);
|
| + backend_->Start(0);
|
|
|
| base::ThreadTaskRunnerHandle::Get()->PostTask(
|
| FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
|
| base::Unretained(this)));
|
| }
|
|
|
| +void AudioVideoPipelineDeviceTest::OnEndOfStream(
|
| + MediaPipelineBackend::Decoder* decoder) {
|
| + bool success = backend_->Stop();
|
| + ASSERT_TRUE(success);
|
| +
|
| + if (decoder == audio_decoder_)
|
| + audio_decoder_ = nullptr;
|
| + else if (decoder == video_decoder_)
|
| + video_decoder_ = nullptr;
|
| +
|
| + if (!audio_decoder_ && !video_decoder_)
|
| + base::MessageLoop::current()->QuitWhenIdle();
|
| +}
|
| +
|
| +void AudioVideoPipelineDeviceTest::OnPushBufferComplete(
|
| + MediaPipelineBackend::Decoder* decoder,
|
| + MediaPipelineBackend::BufferStatus status) {
|
| + EXPECT_NE(status, MediaPipelineBackend::kBufferFailed);
|
| +
|
| + if (decoder == audio_decoder_) {
|
| + if (audio_feeding_completed_)
|
| + return;
|
| +
|
| + base::ThreadTaskRunnerHandle::Get()->PostTask(
|
| + FROM_HERE,
|
| + base::Bind(&AudioVideoPipelineDeviceTest::FeedAudioBuffer,
|
| + base::Unretained(this)));
|
| + } else if (decoder == video_decoder_) {
|
| + if (video_feeding_completed_)
|
| + return;
|
| +
|
| + base::ThreadTaskRunnerHandle::Get()->PostTask(
|
| + FROM_HERE,
|
| + base::Bind(&AudioVideoPipelineDeviceTest::FeedVideoBuffer,
|
| + base::Unretained(this)));
|
| + }
|
| +}
|
| +
|
| void AudioVideoPipelineDeviceTest::MonitorLoop() {
|
| - base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
|
| - media_clock_device_->GetTimeMicroseconds());
|
| + base::TimeDelta media_time =
|
| + base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts());
|
|
|
| if (!pause_pattern_.empty() &&
|
| pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() &&
|
| media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) {
|
| // Do Pause
|
| - media_clock_device_->SetRate(0.0);
|
| - pause_time_ = base::TimeDelta::FromMicroseconds(
|
| - media_clock_device_->GetTimeMicroseconds());
|
| + backend_->Pause();
|
| + pause_time_ = base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts());
|
|
|
| VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " <<
|
| pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms";
|
| @@ -285,8 +399,8 @@ void AudioVideoPipelineDeviceTest::MonitorLoop() {
|
|
|
| void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
|
| // Make sure the media time didn't move during that time.
|
| - base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
|
| - media_clock_device_->GetTimeMicroseconds());
|
| + base::TimeDelta media_time =
|
| + base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts());
|
|
|
| // TODO(damienv):
|
| // Should be:
|
| @@ -303,41 +417,16 @@ void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
|
| VLOG(2) << "Pause complete, restarting media clock";
|
|
|
| // Resume playback and frame feeding.
|
| - media_clock_device_->SetRate(1.0);
|
| + backend_->Resume();
|
|
|
| MonitorLoop();
|
| }
|
|
|
| -void AudioVideoPipelineDeviceTest::OnEos(
|
| - MediaComponentDeviceFeederForTest* device_feeder) {
|
| - for (ComponentDeviceIterator it = component_device_feeders_.begin();
|
| - it != component_device_feeders_.end();
|
| - ++it) {
|
| - if (*it == device_feeder) {
|
| - component_device_feeders_.erase(it);
|
| - break;
|
| - }
|
| - }
|
| -
|
| - // Check if all streams finished
|
| - if (component_device_feeders_.empty())
|
| - base::MessageLoop::current()->QuitWhenIdle();
|
| -}
|
| -
|
| void AudioVideoPipelineDeviceTest::Initialize() {
|
| // Create the media device.
|
| task_runner_.reset(new TaskRunnerImpl());
|
| MediaPipelineDeviceParams params(task_runner_.get());
|
| backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params));
|
| - media_clock_device_ = backend_->GetClock();
|
| -
|
| - // Clock initialization and configuration.
|
| - bool success =
|
| - media_clock_device_->SetState(MediaClockDevice::kStateIdle);
|
| - ASSERT_TRUE(success);
|
| - success = media_clock_device_->ResetTimeline(0);
|
| - ASSERT_TRUE(success);
|
| - media_clock_device_->SetRate(1.0);
|
| }
|
|
|
| TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) {
|
|
|
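For reference, below is a minimal, self-contained sketch of the feeding pattern the updated test adopts: push one buffer at a time, stop when the decoder returns a pending status and continue from OnPushBufferComplete(), and once the EOS buffer has been pushed simply wait for OnEndOfStream(). Everything in the sketch (Delegate, FakeDecoder, Feeder, BufferStatus) is an illustrative stand-in, not the real chromecast MediaPipelineBackend API, and the real test re-posts each feed to a task runner rather than calling it directly.

```cpp
// Illustrative stand-ins only -- not the real chromecast MediaPipelineBackend API.
#include <deque>
#include <iostream>
#include <string>
#include <utility>

enum class BufferStatus { kSuccess, kPending, kFailed };

// Plays the role of MediaPipelineBackend::Delegate in the patch above.
struct Delegate {
  virtual ~Delegate() = default;
  virtual void OnPushBufferComplete(BufferStatus status) = 0;
  virtual void OnEndOfStream() = 0;
};

// A toy decoder that completes every other push asynchronously.
class FakeDecoder {
 public:
  explicit FakeDecoder(Delegate* delegate) : delegate_(delegate) {}

  BufferStatus PushBuffer(const std::string& buffer) {
    if (buffer == "EOS") {
      delegate_->OnEndOfStream();  // A real backend reports this later.
      return BufferStatus::kSuccess;
    }
    ++pushes_;
    return (pushes_ % 2 == 0) ? BufferStatus::kPending : BufferStatus::kSuccess;
  }

  // Invoked when a pending push finishes (the backend does this in the patch).
  void CompletePendingPush() {
    delegate_->OnPushBufferComplete(BufferStatus::kSuccess);
  }

 private:
  Delegate* const delegate_;
  int pushes_ = 0;
};

// Mirrors the shape of FeedAudioBuffer()/OnPushBufferComplete() in the test.
class Feeder : public Delegate {
 public:
  explicit Feeder(std::deque<std::string> buffers)
      : buffers_(std::move(buffers)), decoder_(this) {}

  void Feed() {
    if (completed_ || buffers_.empty())
      return;
    const std::string buffer = buffers_.front();
    buffers_.pop_front();
    const BufferStatus status = decoder_.PushBuffer(buffer);
    if (buffer == "EOS") {
      completed_ = true;  // Feeding is done; wait for OnEndOfStream().
      return;
    }
    if (status == BufferStatus::kPending)
      return;  // Continue from OnPushBufferComplete().
    OnPushBufferComplete(status);
  }

  // Delegate implementation.
  void OnPushBufferComplete(BufferStatus status) override {
    if (status == BufferStatus::kFailed || completed_)
      return;
    Feed();  // The test re-posts this to the task runner instead of recursing.
  }
  void OnEndOfStream() override { std::cout << "end of stream\n"; }

  FakeDecoder& decoder() { return decoder_; }

 private:
  std::deque<std::string> buffers_;
  FakeDecoder decoder_;
  bool completed_ = false;
};

int main() {
  Feeder feeder({"frame0", "frame1", "frame2", "EOS"});
  feeder.Feed();                           // frame0 succeeds; frame1 is pending.
  feeder.decoder().CompletePendingPush();  // Resumes: frame2, then the EOS buffer.
}
```

Compiling and running the sketch prints "end of stream" once the EOS buffer reaches the decoder, which is the point at which the real test tears down the backend and quits the message loop.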