Index: chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc
diff --git a/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc b/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc
index ab32e0566364ae90820621c84befc25409629046..1722b1c062e627252ce9ef6e3fff5989bdac7ae0 100644
--- a/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc
+++ b/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc
@@ -9,23 +9,28 @@
 #include "base/memory/ptr_util.h"
 #include "base/message_loop/message_loop.h"
 #include "base/run_loop.h"
-#include "base/single_thread_task_runner.h"
+#include "base/threading/thread_task_runner_handle.h"
 #include "chromecast/media/base/decrypt_context_impl.h"
 #include "chromecast/media/cdm/cast_cdm_context.h"
-#include "chromecast/media/cma/backend/audio_decoder_default.h"
-#include "chromecast/media/cma/backend/media_pipeline_backend_default.h"
-#include "chromecast/media/cma/backend/video_decoder_default.h"
 #include "chromecast/media/cma/pipeline/av_pipeline_client.h"
 #include "chromecast/media/cma/pipeline/media_pipeline_impl.h"
 #include "chromecast/media/cma/pipeline/video_pipeline_client.h"
 #include "chromecast/media/cma/test/frame_generator_for_test.h"
 #include "chromecast/media/cma/test/mock_frame_provider.h"
+#include "chromecast/media/cma/test/mock_media_pipeline_backend.h"
+#include "chromecast/public/media/cast_decoder_buffer.h"
 #include "media/base/audio_decoder_config.h"
 #include "media/base/media_util.h"
 #include "media/base/video_decoder_config.h"
 #include "media/cdm/player_tracker_impl.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
+using testing::_;
+using testing::Invoke;
+using testing::NiceMock;
+using testing::Return;
+using testing::SaveArg;
+
 namespace {
 // Total number of frames generated by CodedFrameProvider.
 // The first frame has config, while the last one is EOS.
@@ -38,6 +43,18 @@ const int kLastFrameTimestamp = (kNumFrames - 2) * kFrameDurationUs;
 namespace chromecast {
 namespace media {
 
+ACTION_P2(PushBuffer, delegate, buffer_pts) {
+  if (arg0->end_of_stream()) {
+    base::ThreadTaskRunnerHandle::Get()->PostTask(
+        FROM_HERE,
+        base::Bind(&MediaPipelineBackend::Decoder::Delegate::OnEndOfStream,
+                   base::Unretained(*delegate)));
+  } else {
+    *buffer_pts = arg0->timestamp();
+  }
+  return MediaPipelineBackend::kBufferSuccess;
+}
+
 class CastCdmContextForTest : public CastCdmContext {
  public:
   CastCdmContextForTest() : license_installed_(false) {}
@@ -82,26 +99,35 @@ class CastCdmContextForTest : public CastCdmContext {
 // etc in a simple API for tests to use.
 class PipelineHelper {
  public:
-  PipelineHelper(scoped_refptr<base::SingleThreadTaskRunner> task_runner,
-                 bool audio,
-                 bool video,
-                 bool encrypted)
+  enum Stream { STREAM_AUDIO, STREAM_VIDEO };
+
+  PipelineHelper(bool audio, bool video, bool encrypted)
       : have_audio_(audio),
         have_video_(video),
         encrypted_(encrypted),
-        pipeline_backend_(nullptr) {
-    eos_[STREAM_AUDIO] = eos_[STREAM_VIDEO] = false;
-  }
+        pipeline_backend_(nullptr),
+        audio_decoder_delegate_(nullptr),
+        video_decoder_delegate_(nullptr) {}
 
   void Setup() {
     if (encrypted_) {
       cdm_context_.reset(new CastCdmContextForTest());
     }
-    std::unique_ptr<MediaPipelineBackendDefault> backend =
-        base::MakeUnique<MediaPipelineBackendDefault>();
-    pipeline_backend_ = backend.get();
+
+    pipeline_backend_ = new MockMediaPipelineBackend();
+    ON_CALL(*pipeline_backend_, SetPlaybackRate(_)).WillByDefault(Return(true));
+    ON_CALL(audio_decoder_, SetConfig(_)).WillByDefault(Return(true));
+    ON_CALL(audio_decoder_, PushBuffer(_))
+        .WillByDefault(PushBuffer(&audio_decoder_delegate_,
+                                  &last_push_pts_[STREAM_AUDIO]));
+    ON_CALL(video_decoder_, SetConfig(_)).WillByDefault(Return(true));
+    ON_CALL(video_decoder_, PushBuffer(_))
+        .WillByDefault(PushBuffer(&video_decoder_delegate_,
+                                  &last_push_pts_[STREAM_VIDEO]));
+
     media_pipeline_ = base::MakeUnique<MediaPipelineImpl>();
-    media_pipeline_->Initialize(kLoadTypeURL, std::move(backend));
+    media_pipeline_->Initialize(kLoadTypeURL,
+                                base::WrapUnique(pipeline_backend_));
 
     if (have_audio_) {
       ::media::AudioDecoderConfig audio_config(
@@ -111,6 +137,12 @@ class PipelineHelper {
       AvPipelineClient client;
       client.eos_cb = base::Bind(&PipelineHelper::OnEos, base::Unretained(this),
                                  STREAM_AUDIO);
+      EXPECT_CALL(*pipeline_backend_, CreateAudioDecoder())
+          .Times(1)
+          .WillOnce(Return(&audio_decoder_));
+      EXPECT_CALL(audio_decoder_, SetDelegate(_))
+          .Times(1)
+          .WillOnce(SaveArg<0>(&audio_decoder_delegate_));
       ::media::PipelineStatus status = media_pipeline_->InitializeAudio(
           audio_config, client, CreateFrameProvider());
       ASSERT_EQ(::media::PIPELINE_OK, status);
@@ -125,6 +157,12 @@ class PipelineHelper {
       VideoPipelineClient client;
       client.av_pipeline_client.eos_cb = base::Bind(
           &PipelineHelper::OnEos, base::Unretained(this), STREAM_VIDEO);
+      EXPECT_CALL(*pipeline_backend_, CreateVideoDecoder())
+          .Times(1)
+          .WillOnce(Return(&video_decoder_));
+      EXPECT_CALL(video_decoder_, SetDelegate(_))
+          .Times(1)
+          .WillOnce(SaveArg<0>(&video_decoder_delegate_));
       ::media::PipelineStatus status = media_pipeline_->InitializeVideo(
           video_configs, client, CreateFrameProvider());
       ASSERT_EQ(::media::PIPELINE_OK, status);
@@ -135,12 +173,24 @@ class PipelineHelper {
     eos_cb_ = eos_cb;
     eos_[STREAM_AUDIO] = !media_pipeline_->HasAudio();
     eos_[STREAM_VIDEO] = !media_pipeline_->HasVideo();
-    base::TimeDelta start_time = base::TimeDelta::FromMilliseconds(0);
-    media_pipeline_->StartPlayingFrom(start_time);
-    media_pipeline_->SetPlaybackRate(1.0);
+    last_push_pts_[STREAM_AUDIO] = std::numeric_limits<int64_t>::min();
+    last_push_pts_[STREAM_VIDEO] = std::numeric_limits<int64_t>::min();
+    int64_t start_pts = 0;
+
+    EXPECT_CALL(*pipeline_backend_, Initialize())
+        .Times(1)
+        .WillOnce(Return(true));
+    EXPECT_CALL(*pipeline_backend_, Start(start_pts))
+        .Times(1)
+        .WillOnce(Return(true));
+
+    media_pipeline_->StartPlayingFrom(
+        base::TimeDelta::FromMilliseconds(start_pts));
+    media_pipeline_->SetPlaybackRate(1.0f);
   }
   void SetCdm() { media_pipeline_->SetCdm(cdm_context_.get()); }
   void Flush(const base::Closure& flush_cb) {
+    EXPECT_CALL(*pipeline_backend_, Stop()).Times(1);
     media_pipeline_->Flush(flush_cb);
   }
   void Stop() {
@@ -149,11 +199,11 @@ class PipelineHelper {
   }
   void SetCdmLicenseInstalled() { cdm_context_->SetLicenseInstalled(); }
 
-  MediaPipelineBackendDefault* pipeline_backend() { return pipeline_backend_; }
+  bool have_audio() const { return have_audio_; }
+  bool have_video() const { return have_video_; }
+  int64_t last_push_pts(Stream stream) const { return last_push_pts_[stream]; }
 
  private:
-  enum Stream { STREAM_AUDIO, STREAM_VIDEO };
-
   std::unique_ptr<CodedFrameProvider> CreateFrameProvider() {
     std::vector<FrameGeneratorForTest::FrameSpec> frame_specs;
     frame_specs.resize(kNumFrames);
@@ -189,10 +239,15 @@ class PipelineHelper {
   bool have_video_;
   bool encrypted_;
   bool eos_[2];
+  int64_t last_push_pts_[2];
   base::Closure eos_cb_;
   std::unique_ptr<CastCdmContextForTest> cdm_context_;
+  MockMediaPipelineBackend* pipeline_backend_;
+  NiceMock<MockAudioDecoder> audio_decoder_;
+  NiceMock<MockVideoDecoder> video_decoder_;
+  MediaPipelineBackend::Decoder::Delegate* audio_decoder_delegate_;
+  MediaPipelineBackend::Decoder::Delegate* video_decoder_delegate_;
   std::unique_ptr<MediaPipelineImpl> media_pipeline_;
-  MediaPipelineBackendDefault* pipeline_backend_;
 
   DISALLOW_COPY_AND_ASSIGN(PipelineHelper);
 };
@@ -207,8 +262,7 @@ class AudioVideoPipelineImplTest
  protected:
   void SetUp() override {
     pipeline_helper_.reset(new PipelineHelper(
-        message_loop_.task_runner(), ::testing::get<0>(GetParam()),
-        ::testing::get<1>(GetParam()), false));
+        ::testing::get<0>(GetParam()), ::testing::get<1>(GetParam()), false));
     pipeline_helper_->Setup();
   }
 
@@ -219,18 +273,13 @@ class AudioVideoPipelineImplTest
 };
 
 static void VerifyPlay(PipelineHelper* pipeline_helper) {
-  // The backend must still be running.
-  MediaPipelineBackendDefault* backend = pipeline_helper->pipeline_backend();
-  EXPECT_TRUE(backend->running());
-
-  // The decoders must have received a few frames.
-  const AudioDecoderDefault* audio_decoder = backend->audio_decoder();
-  const VideoDecoderDefault* video_decoder = backend->video_decoder();
-  ASSERT_TRUE(audio_decoder || video_decoder);
-  if (audio_decoder)
-    EXPECT_EQ(kLastFrameTimestamp, audio_decoder->last_push_pts());
-  if (video_decoder)
-    EXPECT_EQ(kLastFrameTimestamp, video_decoder->last_push_pts());
+  // The decoders must have received the last frame.
+  if (pipeline_helper->have_audio())
+    EXPECT_EQ(kLastFrameTimestamp,
+              pipeline_helper->last_push_pts(PipelineHelper::STREAM_AUDIO));
+  if (pipeline_helper->have_video())
+    EXPECT_EQ(kLastFrameTimestamp,
+              pipeline_helper->last_push_pts(PipelineHelper::STREAM_VIDEO));
 
   pipeline_helper->Stop();
 }
@@ -246,18 +295,11 @@ TEST_P(AudioVideoPipelineImplTest, Play) {
 }
 
 static void VerifyFlush(PipelineHelper* pipeline_helper) {
-  // The backend must have been stopped.
-  MediaPipelineBackendDefault* backend = pipeline_helper->pipeline_backend();
-  EXPECT_FALSE(backend->running());
-
   // The decoders must not have received any frame.
-  const AudioDecoderDefault* audio_decoder = backend->audio_decoder();
-  const VideoDecoderDefault* video_decoder = backend->video_decoder();
-  ASSERT_TRUE(audio_decoder || video_decoder);
-  if (audio_decoder)
-    EXPECT_LT(audio_decoder->last_push_pts(), 0);
-  if (video_decoder)
-    EXPECT_LT(video_decoder->last_push_pts(), 0);
+  if (pipeline_helper->have_audio())
+    EXPECT_LT(pipeline_helper->last_push_pts(PipelineHelper::STREAM_AUDIO), 0);
+  if (pipeline_helper->have_video())
+    EXPECT_LT(pipeline_helper->last_push_pts(PipelineHelper::STREAM_VIDEO), 0);
 
   pipeline_helper->Stop();
 }
@@ -310,8 +352,7 @@ class EncryptedAVPipelineImplTest : public ::testing::Test {
 
  protected:
   void SetUp() override {
-    pipeline_helper_.reset(
-        new PipelineHelper(message_loop_.task_runner(), true, true, true));
+    pipeline_helper_.reset(new PipelineHelper(true, true, true));
     pipeline_helper_->Setup();
   }
 
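Note on the gmock pattern used above: the new test captures the Decoder::Delegate that MediaPipelineImpl installs (via SaveArg<0> on SetDelegate) and routes every PushBuffer call through a default action that either records the buffer's timestamp or, for the end-of-stream buffer, posts OnEndOfStream back to the captured delegate. The ACTION_P2 takes the address of the delegate member and dereferences it only when the action runs, since the delegate is not known until SetDelegate has been called; posting OnEndOfStream through base::ThreadTaskRunnerHandle instead of calling it inline presumably mimics a real backend, which reports end-of-stream asynchronously rather than from inside PushBuffer. Below is a minimal, self-contained sketch of that capture-and-default-action idiom. FakeBuffer, FakeDecoder, MockFakeDecoder, NullDelegate, and RecordPush are hypothetical stand-ins invented for this illustration; the real test uses CastDecoderBuffer, MediaPipelineBackend::Decoder, and the mocks declared in mock_media_pipeline_backend.h.

// Self-contained illustration of the delegate-capture + default-action
// pattern. All types here are hypothetical stand-ins for this sketch only.
#include <cstdint>
#include <limits>

#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"

using testing::_;
using testing::NiceMock;
using testing::SaveArg;

struct FakeBuffer {
  bool end_of_stream;
  int64_t timestamp;
};

class FakeDecoder {
 public:
  class Delegate {
   public:
    virtual ~Delegate() {}
    virtual void OnEndOfStream() = 0;
  };
  virtual ~FakeDecoder() {}
  virtual void SetDelegate(Delegate* delegate) = 0;
  virtual bool PushBuffer(const FakeBuffer* buffer) = 0;
};

class MockFakeDecoder : public FakeDecoder {
 public:
  MOCK_METHOD1(SetDelegate, void(Delegate* delegate));
  MOCK_METHOD1(PushBuffer, bool(const FakeBuffer* buffer));
};

// Default action mirroring the patch's PushBuffer ACTION_P2: record the pts
// of regular buffers, flag end-of-stream buffers.
ACTION_P2(RecordPush, saw_eos, last_pts) {
  if (arg0->end_of_stream) {
    *saw_eos = true;
  } else {
    *last_pts = arg0->timestamp;
  }
  return true;
}

class NullDelegate : public FakeDecoder::Delegate {
 public:
  void OnEndOfStream() override {}
};

TEST(DelegateCapturePatternTest, CapturesDelegateAndRecordsPts) {
  NiceMock<MockFakeDecoder> decoder;
  FakeDecoder::Delegate* captured_delegate = nullptr;
  bool saw_eos = false;
  int64_t last_pts = std::numeric_limits<int64_t>::min();

  // SaveArg<0> stores whichever delegate the code under test installs, so the
  // test can later drive callbacks such as OnEndOfStream() itself.
  EXPECT_CALL(decoder, SetDelegate(_)).WillOnce(SaveArg<0>(&captured_delegate));
  ON_CALL(decoder, PushBuffer(_))
      .WillByDefault(RecordPush(&saw_eos, &last_pts));

  // Stand-in for the production code exercising the decoder.
  NullDelegate delegate;
  decoder.SetDelegate(&delegate);
  FakeBuffer frame = {false, 42};
  FakeBuffer eos = {true, 0};
  decoder.PushBuffer(&frame);
  decoder.PushBuffer(&eos);

  EXPECT_EQ(&delegate, captured_delegate);
  EXPECT_EQ(42, last_pts);
  EXPECT_TRUE(saw_eos);
}

In the patch itself, the same captured delegate is what lets the end-of-stream buffer from the frame provider reach MediaPipelineImpl and eventually fire the eos_cb wired up in Setup(), which is what the playback tests ultimately wait on.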