Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(4414)

Unified Diff: chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc

Issue 622853002: Chromecast: adds interfaces for hardware layer of CMA pipeline. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@cma-decrypt-context
Patch Set: rebased onto CMA testing CL Created 6 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
diff --git a/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc b/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
new file mode 100644
index 0000000000000000000000000000000000000000..33afc74f57af1691c37b992695c89482919f2543
--- /dev/null
+++ b/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
@@ -0,0 +1,276 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
damienv1 2014/10/03 22:19:41 I think the whole unit test has been updated in th
gunsch 2014/10/03 23:08:44 Huh, so it was. Updated.
+
+#include <list>
+#include <vector>
+
+#include "base/basictypes.h"
+#include "base/bind.h"
+#include "base/files/file_path.h"
+#include "base/files/memory_mapped_file.h"
+#include "base/logging.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_loop_proxy.h"
+#include "base/path_service.h"
+#include "base/threading/thread.h"
+#include "base/time/time.h"
+#include "chromecast/media/cma/backend/audio_pipeline_device.h"
+#include "chromecast/media/cma/backend/media_clock_device.h"
+#include "chromecast/media/cma/backend/media_component_device_feeder_for_test.h"
+#include "chromecast/media/cma/backend/media_pipeline_device.h"
+#include "chromecast/media/cma/backend/video_pipeline_device.h"
+#include "chromecast/media/cma/base/decoder_buffer_adapter.h"
+#include "chromecast/media/cma/base/decoder_buffer_base.h"
+#include "chromecast/media/cma/test/frame_segmenter_for_test.h"
+#include "media/base/audio_decoder_config.h"
+#include "media/base/buffers.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/video_decoder_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace chromecast {
+namespace media {
+
namespace {

// Builds the absolute path of a media test file located under
// <src root>/media/test/data/<name>.
base::FilePath GetTestDataFilePath(const std::string& name) {
  base::FilePath file_path;
  CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path));

  file_path = file_path.Append(FILE_PATH_LITERAL("media"))
      .Append(FILE_PATH_LITERAL("test"))
      .Append(FILE_PATH_LITERAL("data"))
      .AppendASCII(name);
  return file_path;
}

}  // namespace
+
// Test fixture that exercises the platform-specific CMA backend devices
// (audio pipeline, video pipeline and media clock) directly, feeding them
// frames demuxed from media test files.
class AudioVideoPipelineDeviceTest : public testing::Test {
 public:
  AudioVideoPipelineDeviceTest();
  virtual ~AudioVideoPipelineDeviceTest();

  // Sets up feeders for both the audio and the video stream of |filename|.
  void ConfigureForFile(std::string filename);
  // Sets up a feeder for the audio stream of |filename| only.
  void ConfigureForAudioOnly(std::string filename);
  // Sets up a feeder for the video stream of |filename| only. When
  // |raw_h264| is true, |filename| is segmented as a raw H264 stream
  // instead of being demuxed by FFmpeg.
  void ConfigureForVideoOnly(std::string filename,
                             bool raw_h264);

  // Posts a Feed task for every configured stream on the current loop.
  void Start();

 private:
  void Initialize();

  void LoadAudioStream(std::string filename);
  void LoadVideoStream(std::string filename, bool raw_h264);

  // EOS callback of the primary stream (feeder 0); ends the test.
  void OnPrimaryEos();

  scoped_ptr<MediaPipelineDevice> media_pipeline_device_;
  // Owned by |media_pipeline_device_|; set in Initialize().
  MediaClockDevice* media_clock_device_;

  // Devices to feed
  std::vector<scoped_refptr<MediaComponentDeviceFeeder> >
      component_device_feeders_;

  DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest);
};
+
+AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest() {
+}
+
+AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() {
+}
+
+void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly(std::string filename) {
+ Initialize();
+
+ LoadAudioStream(filename);
+
+ component_device_feeders_[0]->SetFrameCountBetweenPause(50);
+ component_device_feeders_[0]->SetEosCB(
+ base::Bind(&AudioVideoPipelineDeviceTest::OnPrimaryEos,
+ base::Unretained(this)));
+}
+
+void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(std::string filename,
+ bool raw_h264) {
+ Initialize();
+
+ LoadVideoStream(filename, raw_h264);
+
+ component_device_feeders_[0]->SetFrameCountBetweenPause(50);
+ component_device_feeders_[0]->SetEosCB(
+ base::Bind(&AudioVideoPipelineDeviceTest::OnPrimaryEos,
+ base::Unretained(this)));
+}
+
+void AudioVideoPipelineDeviceTest::ConfigureForFile(std::string filename) {
+ Initialize();
+
+ LoadVideoStream(filename, /* raw_h264 */ false);
+ LoadAudioStream(filename);
+
+ component_device_feeders_[0]->SetFrameCountBetweenPause(50);
+ component_device_feeders_[0]->SetEosCB(
+ base::Bind(&AudioVideoPipelineDeviceTest::OnPrimaryEos,
+ base::Unretained(this)));
+}
+
+void AudioVideoPipelineDeviceTest::LoadAudioStream(std::string filename) {
+ base::FilePath file_path = GetTestDataFilePath(filename);
+ DemuxResult demux_result = FFmpegDemuxForTest(file_path, /*audio*/ true);
+ std::list<scoped_refptr<DecoderBufferBase> > frames = demux_result.frames;
+
+ AudioPipelineDevice *audio_pipeline_device =
+ media_pipeline_device_->GetAudioPipelineDevice();
+
+ // Set configuration.
+ bool success = audio_pipeline_device->SetConfig(demux_result.audio_config);
+ ASSERT_TRUE(success);
+
+ VLOG(2) << "Got " << frames.size() << " audio input frames";
+
+ frames.push_back(
+ scoped_refptr<DecoderBufferBase>(
+ new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));
+
+ scoped_refptr<MediaComponentDeviceFeeder> new_feeder =
+ scoped_refptr<MediaComponentDeviceFeeder>(
+ new MediaComponentDeviceFeeder(audio_pipeline_device,
+ media_clock_device_,
+ frames));
+ new_feeder->Initialize();
+
+ component_device_feeders_.push_back(new_feeder);
+}
+
// Loads video frames from |filename| and registers a feeder for the video
// pipeline device. When |raw_h264| is set, the file is treated as a raw
// H264 elementary stream and segmented locally with a hard-coded config;
// otherwise FFmpeg demuxes both the frames and the decoder config.
void AudioVideoPipelineDeviceTest::LoadVideoStream(std::string filename,
                                                   bool raw_h264) {
  std::list<scoped_refptr<DecoderBufferBase> > frames;
  ::media::VideoDecoderConfig video_config;

  if (raw_h264) {
    base::FilePath file_path = GetTestDataFilePath(filename);
    base::MemoryMappedFile video_stream;
    ASSERT_TRUE(video_stream.Initialize(file_path))
        << "Couldn't open stream file: " << file_path.MaybeAsASCII();
    frames = H264SegmenterForTest(video_stream.data(), video_stream.length());

    // Use arbitrary sizes.
    gfx::Size coded_size(320, 240);
    gfx::Rect visible_rect(0, 0, 320, 240);
    gfx::Size natural_size(320, 240);

    video_config = ::media::VideoDecoderConfig(
        ::media::kCodecH264,
        ::media::H264PROFILE_MAIN,
        ::media::VideoFrame::I420,
        coded_size,
        visible_rect,
        natural_size,
        NULL, 0, false);  // No extra data, not encrypted.
  } else {
    base::FilePath file_path = GetTestDataFilePath(filename);
    DemuxResult demux_result = FFmpegDemuxForTest(file_path,
                                                  /*audio*/ false);
    frames = demux_result.frames;
    video_config = demux_result.video_config;
  }

  VideoPipelineDevice *video_pipeline_device =
      media_pipeline_device_->GetVideoPipelineDevice();

  // Set configuration.
  bool success = video_pipeline_device->SetConfig(video_config);
  ASSERT_TRUE(success);

  VLOG(2) << "Got " << frames.size() << " video input frames";

  // Terminate the stream with an end-of-stream buffer so the feeder can
  // report EOS.
  frames.push_back(
      scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter(
          ::media::DecoderBuffer::CreateEOSBuffer())));

  scoped_refptr<MediaComponentDeviceFeeder> new_feeder =
      scoped_refptr<MediaComponentDeviceFeeder>(
          new MediaComponentDeviceFeeder(video_pipeline_device,
                                         media_clock_device_,
                                         frames));
  new_feeder->Initialize();

  component_device_feeders_.push_back(new_feeder);
}
+
+void AudioVideoPipelineDeviceTest::Start() {
+ for (int i=0; i < component_device_feeders_.size(); i++) {
+ base::MessageLoopProxy::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&MediaComponentDeviceFeeder::Feed,
+ base::Unretained(component_device_feeders_[i].get())));
+ }
+}
+
// Invoked when the primary stream's feeder reaches end-of-stream: ends the
// test by quitting the message loop once all pending tasks have run.
void AudioVideoPipelineDeviceTest::OnPrimaryEos() {
  base::MessageLoop::current()->QuitWhenIdle();
}
+
// Creates the platform media pipeline device and brings its clock to a
// known state (idle, timeline at 0, normal 1x rate). Must run before any
// Load*Stream call, which relies on |media_pipeline_device_| and
// |media_clock_device_|.
void AudioVideoPipelineDeviceTest::Initialize() {
  // Create the media device.
  MediaPipelineDevice::CreateParameters params;
  media_pipeline_device_.reset(CreateMediaPipelineDevice(params).release());
  // Raw pointer owned by |media_pipeline_device_|.
  media_clock_device_ = media_pipeline_device_->GetMediaClockDevice();

  // Clock initialization and configuration.
  bool success =
      media_clock_device_->SetState(MediaClockDevice::kStateIdle);
  ASSERT_TRUE(success);
  success = media_clock_device_->ResetTimeline(base::TimeDelta());
  ASSERT_TRUE(success);
  media_clock_device_->SetRate(1.0);
}
+
+TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) {
+ scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
+
+ ConfigureForAudioOnly("sfx.mp3");
+ Start();
+ message_loop->Run();
+}
+
+TEST_F(AudioVideoPipelineDeviceTest, VorbisPlayback) {
+ scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
+
+ ConfigureForAudioOnly("sfx.ogg");
+ Start();
+ message_loop->Run();
+}
+
+TEST_F(AudioVideoPipelineDeviceTest, H264Playback) {
+ scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
+
+ ConfigureForVideoOnly("bear.h264", /* raw_h264 */ true);
damienv1 2014/10/03 22:19:41 Note: we introduce a dependency on Chrome test med
gunsch 2014/10/03 23:08:44 Only w.r.t header includes. Even gyp DEPS are okay
+ Start();
+ message_loop->Run();
+}
+
+TEST_F(AudioVideoPipelineDeviceTest, Vp8Playback) {
+ scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
+
+ ConfigureForVideoOnly("bear-vp8a.webm", /* raw_h264 */ false);
+ Start();
+ message_loop->Run();
+}
+
+TEST_F(AudioVideoPipelineDeviceTest, VorbisAndVp8InWebmPlayback) {
+ scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
+
+ ConfigureForFile("bear-640x360.webm");
+ Start();
+ message_loop->Run();
+}
+
+} // namespace media
+} // namespace chromecast

Powered by Google App Engine
This is Rietveld 408576698