Chromium Code Reviews

Side by Side Diff: chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc

Issue 622853002: Chromecast: adds interfaces for hardware layer of CMA pipeline. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@cma-decrypt-context
Patch Set: style feedback/fixes Created 6 years, 2 months ago
1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <list>
damienv1 2014/10/06 17:00:23 nit: Not used.
gunsch 2014/10/06 17:19:51 Done.
6 #include <vector>
7
8 #include "base/basictypes.h"
9 #include "base/bind.h"
10 #include "base/files/file_path.h"
11 #include "base/files/memory_mapped_file.h"
12 #include "base/logging.h"
13 #include "base/memory/ref_counted.h"
14 #include "base/memory/scoped_ptr.h"
15 #include "base/memory/scoped_vector.h"
16 #include "base/message_loop/message_loop.h"
17 #include "base/message_loop/message_loop_proxy.h"
18 #include "base/path_service.h"
19 #include "base/threading/thread.h"
20 #include "base/time/time.h"
21 #include "chromecast/media/base/decrypt_context.h"
22 #include "chromecast/media/cma/backend/audio_pipeline_device.h"
23 #include "chromecast/media/cma/backend/media_clock_device.h"
24 #include "chromecast/media/cma/backend/media_pipeline_device.h"
25 #include "chromecast/media/cma/backend/media_pipeline_device_params.h"
26 #include "chromecast/media/cma/backend/video_pipeline_device.h"
27 #include "chromecast/media/cma/base/decoder_buffer_adapter.h"
28 #include "chromecast/media/cma/base/decoder_buffer_base.h"
29 #include "chromecast/media/cma/test/frame_segmenter_for_test.h"
30 #include "chromecast/media/cma/test/media_component_device_feeder_for_test.h"
31 #include "media/base/audio_decoder_config.h"
32 #include "media/base/buffers.h"
33 #include "media/base/decoder_buffer.h"
34 #include "media/base/video_decoder_config.h"
35 #include "testing/gtest/include/gtest/gtest.h"
36
37 namespace chromecast {
38 namespace media {
39
40 namespace {
41
42 typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator
43 ComponentDeviceIterator;
44
45 const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20);
46
47 base::FilePath GetTestDataFilePath(const std::string& name) {
48 base::FilePath file_path;
49 CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path));
50
51 file_path = file_path.Append(FILE_PATH_LITERAL("media"))
52 .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data"))
53 .AppendASCII(name);
54 return file_path;
55 }
56
57 } // namespace
58
59 class AudioVideoPipelineDeviceTest : public testing::Test {
60 public:
61 struct PauseInfo {
62 PauseInfo() {}
63 PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {}
64 ~PauseInfo() {}
65
66 base::TimeDelta delay;
67 base::TimeDelta length;
68 };
69
70 AudioVideoPipelineDeviceTest();
71 virtual ~AudioVideoPipelineDeviceTest();
72
73 void ConfigureForFile(std::string filename);
74 void ConfigureForAudioOnly(std::string filename);
75 void ConfigureForVideoOnly(std::string filename, bool raw_h264);
76
77 // The pattern loops: waits >= pattern[i].delay of media clock time between
78 // pauses, then pauses for >= pattern[i].length of MessageLoop time.
79 // A pause with delay < 0 stops the sequence and does not loop.
80 void SetPausePattern(const std::vector<PauseInfo> pattern);
81
82 // Adds a pause to the end of pause pattern
83 void AddPause(base::TimeDelta delay, base::TimeDelta length);
84
85 void Start();
86
87 private:
88 void Initialize();
89
90 void LoadAudioStream(std::string filename);
91 void LoadVideoStream(std::string filename, bool raw_h264);
92
93 void MonitorLoop();
94
95 void OnPauseCompleted();
96
97 void OnEos(MediaComponentDeviceFeederForTest* device_feeder);
98
99 scoped_ptr<MediaPipelineDevice> media_pipeline_device_;
100 MediaClockDevice* media_clock_device_;
101
102 // Devices to feed
103 ScopedVector<MediaComponentDeviceFeederForTest>
104 component_device_feeders_;
105
106 // Current media time.
107 base::TimeDelta pause_time_;
108
109 // Pause settings
110 std::vector<PauseInfo> pause_pattern_;
111 int pause_pattern_idx_;
112
113 DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest);
114 };
115
116 AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest()
117 : pause_pattern_() {
118 }
119
120 AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() {
121 }
122
123 void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay,
124 base::TimeDelta length) {
125 pause_pattern_.push_back(PauseInfo(delay, length));
126 }
127
128 void AudioVideoPipelineDeviceTest::SetPausePattern(
129 const std::vector<PauseInfo> pattern) {
130 pause_pattern_ = pattern;
131 }
132
133 void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly(std::string filename) {
134 Initialize();
135 LoadAudioStream(filename);
136 }
137
138 void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(std::string filename,
139 bool raw_h264) {
140 Initialize();
141 LoadVideoStream(filename, raw_h264);
142 }
143
144 void AudioVideoPipelineDeviceTest::ConfigureForFile(std::string filename) {
145 Initialize();
146 LoadVideoStream(filename, false /* raw_h264 */);
147 LoadAudioStream(filename);
148 }
149
150 void AudioVideoPipelineDeviceTest::LoadAudioStream(std::string filename) {
151 base::FilePath file_path = GetTestDataFilePath(filename);
152 DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */);
153 BufferList frames = demux_result.frames;
154
155 AudioPipelineDevice* audio_pipeline_device =
156 media_pipeline_device_->GetAudioPipelineDevice();
157
158 bool success = audio_pipeline_device->SetConfig(demux_result.audio_config);
159 ASSERT_TRUE(success);
160
161 VLOG(2) << "Got " << frames.size() << " audio input frames";
162
163 frames.push_back(
164 scoped_refptr<DecoderBufferBase>(
165 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));
166
167 MediaComponentDeviceFeederForTest* device_feeder =
168 new MediaComponentDeviceFeederForTest(audio_pipeline_device, frames);
169 device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
170 base::Unretained(this),
171 device_feeder));
172 component_device_feeders_.push_back(device_feeder);
173 }
174
175 void AudioVideoPipelineDeviceTest::LoadVideoStream(std::string filename,
176 bool raw_h264) {
177 BufferList frames;
178 ::media::VideoDecoderConfig video_config;
179
180 if (raw_h264) {
181 base::FilePath file_path = GetTestDataFilePath(filename);
182 base::MemoryMappedFile video_stream;
183 ASSERT_TRUE(video_stream.Initialize(file_path))
184 << "Couldn't open stream file: " << file_path.MaybeAsASCII();
185 frames = H264SegmenterForTest(video_stream.data(), video_stream.length());
186
187 // Use arbitrary sizes.
188 gfx::Size coded_size(320, 240);
189 gfx::Rect visible_rect(0, 0, 320, 240);
190 gfx::Size natural_size(320, 240);
191
192 // TODO(kjoswiak): Either pull data from the stream or make the caller specify values.
193 video_config = ::media::VideoDecoderConfig(
194 ::media::kCodecH264,
195 ::media::H264PROFILE_MAIN,
196 ::media::VideoFrame::I420,
197 coded_size,
198 visible_rect,
199 natural_size,
200 NULL, 0, false);
201 } else {
202 base::FilePath file_path = GetTestDataFilePath(filename);
203 DemuxResult demux_result = FFmpegDemuxForTest(file_path,
204 /*audio*/ false);
205 frames = demux_result.frames;
206 video_config = demux_result.video_config;
207 }
208
209 VideoPipelineDevice* video_pipeline_device =
210 media_pipeline_device_->GetVideoPipelineDevice();
211
212 // Set configuration.
213 bool success = video_pipeline_device->SetConfig(video_config);
214 ASSERT_TRUE(success);
215
216 VLOG(2) << "Got " << frames.size() << " video input frames";
217
218 frames.push_back(
219 scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter(
220 ::media::DecoderBuffer::CreateEOSBuffer())));
221
222 MediaComponentDeviceFeederForTest* device_feeder =
223 new MediaComponentDeviceFeederForTest(video_pipeline_device, frames);
224 device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
225 base::Unretained(this),
226 device_feeder));
227 component_device_feeders_.push_back(device_feeder);
228 }
229
230 void AudioVideoPipelineDeviceTest::Start() {
231 pause_time_ = base::TimeDelta();
232 pause_pattern_idx_ = 0;
233
234 for (size_t i = 0; i < component_device_feeders_.size(); i++) {
235 base::MessageLoopProxy::current()->PostTask(
236 FROM_HERE,
237 base::Bind(&MediaComponentDeviceFeederForTest::Feed,
238 base::Unretained(component_device_feeders_[i])));
239 }
240
241 media_clock_device_->SetState(MediaClockDevice::kStateRunning);
242
243 base::MessageLoopProxy::current()->PostTask(
244 FROM_HERE,
245 base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
246 base::Unretained(this)));
247 }
248
249 void AudioVideoPipelineDeviceTest::MonitorLoop() {
250 base::TimeDelta media_time = media_clock_device_->GetTime();
251
252 if (!pause_pattern_.empty() &&
253 pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() &&
254 media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) {
255 // Do Pause
256 media_clock_device_->SetRate(0.0);
257 pause_time_ = media_clock_device_->GetTime();
258
259 VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " <<
260 pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms";
261
262 // Wait for pause finish
263 base::MessageLoopProxy::current()->PostDelayedTask(
264 FROM_HERE,
265 base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted,
266 base::Unretained(this)),
267 pause_pattern_[pause_pattern_idx_].length);
268 return;
269 }
270
271 // Check state again in a little while
272 base::MessageLoopProxy::current()->PostDelayedTask(
273 FROM_HERE,
274 base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
275 base::Unretained(this)),
276 kMonitorLoopDelay);
277 }
278
279 void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
280 // Make sure the media time didn't move during that time.
281 base::TimeDelta media_time = media_clock_device_->GetTime();
282
283 // TODO(damienv):
284 // This should be:
285 //   EXPECT_EQ(media_time, pause_time_);
286 // However, some backends move the time forward when rendering the first
287 // frame while in paused mode.
288 // This behaviour is not intended.
289 EXPECT_GE(media_time, pause_time_);
290 EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50));
291
292 pause_time_ = media_time;
293 pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size();
294
295 VLOG(2) << "Pause complete, restarting media clock";
296
297 // Resume playback and frame feeding.
298 media_clock_device_->SetRate(1.0);
299
300 MonitorLoop();
301 }
302
303 void AudioVideoPipelineDeviceTest::OnEos(
304 MediaComponentDeviceFeederForTest* device_feeder) {
305 for (ComponentDeviceIterator it = component_device_feeders_.begin();
306 it != component_device_feeders_.end();
307 ++it) {
308 if (*it == device_feeder) {
309 component_device_feeders_.erase(it);
310 break;
311 }
312 }
313
314 // Check if all streams finished
315 if (component_device_feeders_.empty())
316 base::MessageLoop::current()->QuitWhenIdle();
317 }
318
319 void AudioVideoPipelineDeviceTest::Initialize() {
320 // Create the media device.
321 MediaPipelineDeviceParams params;
322 media_pipeline_device_.reset(CreateMediaPipelineDevice(params).release());
323 media_clock_device_ = media_pipeline_device_->GetMediaClockDevice();
324
325 // Clock initialization and configuration.
326 bool success =
327 media_clock_device_->SetState(MediaClockDevice::kStateIdle);
328 ASSERT_TRUE(success);
329 success = media_clock_device_->ResetTimeline(base::TimeDelta());
330 ASSERT_TRUE(success);
331 media_clock_device_->SetRate(1.0);
332 }
333
334 TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) {
335 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
336
337 ConfigureForAudioOnly("sfx.mp3");
338 Start();
339 message_loop->Run();
340 }
341
342 TEST_F(AudioVideoPipelineDeviceTest, VorbisPlayback) {
343 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
344
345 ConfigureForAudioOnly("sfx.ogg");
346 Start();
347 message_loop->Run();
348 }
349
350 TEST_F(AudioVideoPipelineDeviceTest, H264Playback) {
351 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
352
353 ConfigureForVideoOnly("bear.h264", true /* raw_h264 */);
354 Start();
355 message_loop->Run();
356 }
357
358 TEST_F(AudioVideoPipelineDeviceTest, WebmPlaybackWithPause) {
359 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
360
361 // Set up to pause for 100ms every 500ms
362 AddPause(base::TimeDelta::FromMilliseconds(500),
363 base::TimeDelta::FromMilliseconds(100));
364
365 ConfigureForVideoOnly("bear-640x360.webm", false /* raw_h264 */);
366 Start();
367 message_loop->Run();
368 }
369
370 TEST_F(AudioVideoPipelineDeviceTest, Vp8Playback) {
371 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
372
373 ConfigureForVideoOnly("bear-vp8a.webm", false /* raw_h264 */);
374 Start();
375 message_loop->Run();
376 }
377
378 TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) {
379 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
380
381 ConfigureForFile("bear-640x360.webm");
382 Start();
383 message_loop->Run();
384 }
385
386 } // namespace media
387 } // namespace chromecast
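
The pause-pattern hooks above (AddPause / SetPausePattern, driven by MonitorLoop) are exercised only by the WebmPlaybackWithPause case. Below is a minimal sketch of an additional audio-only pause test, reusing the fixture API and the sfx.mp3 asset already used by Mp3Playback; the test name Mp3PlaybackWithPause is hypothetical and not part of this patch:

TEST_F(AudioVideoPipelineDeviceTest, Mp3PlaybackWithPause) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  // Pause for 100ms (MessageLoop time) after every 500ms of media time,
  // mirroring WebmPlaybackWithPause on the audio-only path.
  AddPause(base::TimeDelta::FromMilliseconds(500),
           base::TimeDelta::FromMilliseconds(100));

  ConfigureForAudioOnly("sfx.mp3");
  Start();
  message_loop->Run();
}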