// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <list>
#include <vector>

#include "base/basictypes.h"
#include "base/bind.h"
#include "base/files/file_path.h"
#include "base/files/memory_mapped_file.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/path_service.h"
#include "base/threading/thread.h"
#include "base/time/time.h"
#include "chromecast/media/base/decrypt_context.h"
#include "chromecast/media/cma/backend/audio_pipeline_device.h"
#include "chromecast/media/cma/backend/media_clock_device.h"
#include "chromecast/media/cma/backend/media_pipeline_device.h"
#include "chromecast/media/cma/backend/media_pipeline_device_params.h"
#include "chromecast/media/cma/backend/video_pipeline_device.h"
#include "chromecast/media/cma/base/decoder_buffer_adapter.h"
#include "chromecast/media/cma/base/decoder_buffer_base.h"
#include "chromecast/media/cma/test/frame_segmenter_for_test.h"
#include "chromecast/media/cma/test/media_component_device_feeder_for_test.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/buffers.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_decoder_config.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace chromecast {
namespace media {

namespace {

typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator
    ComponentDeviceIterator;

const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20);

base::FilePath GetTestDataFilePath(const std::string& name) {
  base::FilePath file_path;
  CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path));

  file_path = file_path.Append(FILE_PATH_LITERAL("media"))
      .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data"))
      .AppendASCII(name);
  return file_path;
}

}  // namespace

class AudioVideoPipelineDeviceTest : public testing::Test {
 public:
  struct PauseInfo {
    PauseInfo() {}
    PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {}
    ~PauseInfo() {}

    base::TimeDelta delay;
    base::TimeDelta length;
  };

  AudioVideoPipelineDeviceTest();
  virtual ~AudioVideoPipelineDeviceTest();

  void ConfigureForFile(std::string filename);
  void ConfigureForAudioOnly(std::string filename);
  void ConfigureForVideoOnly(std::string filename,
                             bool raw_h264);
damienv1 (2014/10/03 23:33:45): nit: Could go on the previous line.
gunsch (2014/10/04 00:37:40): Done.

  // Pattern loops, waiting >= pattern[i].delay against media clock between
  // pauses, then pausing for >= pattern[i].length against MessageLoop
damienv1 (2014/10/03 23:33:45): nit: extra spaces not needed
gunsch (2014/10/04 00:37:40): Done.
  // A pause with delay < 0 signals to stop the sequence and not loop.
  void SetPausePattern(const std::vector<PauseInfo> pattern);
damienv1 (2014/10/03 23:33:44): Blank line.
gunsch (2014/10/04 00:37:40): Done.
  // Add a pause to the end of the pause pattern.
  void AddPause(base::TimeDelta delay, base::TimeDelta length);

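  // Starts feeding all configured streams and kicks off the monitor loop.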
  void Start();

 private:
  void Initialize();

  void LoadAudioStream(std::string filename);
  void LoadVideoStream(std::string filename, bool raw_h264);

  void MonitorLoop();

  void OnPauseCompleted();

  void OnEos(MediaComponentDeviceFeederForTest *device_feeder);

  scoped_ptr<MediaPipelineDevice> media_pipeline_device_;
  MediaClockDevice* media_clock_device_;

  // Devices to feed
  ScopedVector<MediaComponentDeviceFeederForTest>
      component_device_feeders_;

  // Current media time.
  base::TimeDelta pause_time_;

  // Pause settings
  std::vector<PauseInfo> pause_pattern_;
  int pause_pattern_idx_;

  DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest);
};

AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest()
    : pause_pattern_() {
}

AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() {
}

void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay,
                                            base::TimeDelta length) {
  pause_pattern_.push_back(PauseInfo(delay, length));
}

void AudioVideoPipelineDeviceTest::SetPausePattern(
    const std::vector<PauseInfo> pattern) {
  // Copy pattern
damienv1 (2014/10/03 23:33:44): Copy could be removed. Does not help.
gunsch (2014/10/04 00:37:40): Done.
  pause_pattern_ = pattern;
}

void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly(std::string filename) {
  Initialize();

damienv1 (2014/10/03 23:33:44): nit: No blank line needed.
gunsch (2014/10/04 00:37:40): Done.
  LoadAudioStream(filename);
}

void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(std::string filename,
                                                         bool raw_h264) {
  Initialize();

damienv1 (2014/10/03 23:33:45): No blank line needed.
gunsch (2014/10/04 00:37:40): Done.
  LoadVideoStream(filename, raw_h264);
}

void AudioVideoPipelineDeviceTest::ConfigureForFile(std::string filename) {
  Initialize();

  LoadVideoStream(filename, /* raw_h264 */ false);
damienv1 (2014/10/03 23:33:45): Style ?
gunsch (2014/10/04 00:37:40): Done.
  LoadAudioStream(filename);
}

void AudioVideoPipelineDeviceTest::LoadAudioStream(std::string filename) {
  base::FilePath file_path = GetTestDataFilePath(filename);
  DemuxResult demux_result = FFmpegDemuxForTest(file_path, /*audio*/ true);
  std::list<scoped_refptr<DecoderBufferBase> > frames = demux_result.frames;

  AudioPipelineDevice *audio_pipeline_device =
      media_pipeline_device_->GetAudioPipelineDevice();

  // Set configuration.
  bool success = audio_pipeline_device->SetConfig(demux_result.audio_config);
  ASSERT_TRUE(success);

  VLOG(2) << "Got " << frames.size() << " audio input frames";

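  // Append an end-of-stream buffer so the feeder can report when it is done.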
  frames.push_back(
      scoped_refptr<DecoderBufferBase>(
          new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));

  MediaComponentDeviceFeederForTest *device_feeder =
damienv1 (2014/10/03 23:33:45): Style: Test*
gunsch (2014/10/04 00:37:40): Done.
      new MediaComponentDeviceFeederForTest(
          audio_pipeline_device,
          frames,
          base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
                     base::Unretained(this)));

  device_feeder->Initialize();

  component_device_feeders_.push_back(device_feeder);
}

void AudioVideoPipelineDeviceTest::LoadVideoStream(std::string filename,
                                                   bool raw_h264) {
  std::list<scoped_refptr<DecoderBufferBase> > frames;
  ::media::VideoDecoderConfig video_config;

  if (raw_h264) {
    base::FilePath file_path = GetTestDataFilePath(filename);
    base::MemoryMappedFile video_stream;
    ASSERT_TRUE(video_stream.Initialize(file_path))
        << "Couldn't open stream file: " << file_path.MaybeAsASCII();
    frames = H264SegmenterForTest(video_stream.data(), video_stream.length());

    // Use arbitrary sizes.
    gfx::Size coded_size(320, 240);
    gfx::Rect visible_rect(0, 0, 320, 240);
    gfx::Size natural_size(320, 240);

    // TODO(kjoswiak): Either pull data from stream or make caller specify value
    video_config = ::media::VideoDecoderConfig(
        ::media::kCodecH264,
        ::media::H264PROFILE_MAIN,
        ::media::VideoFrame::I420,
        coded_size,
        visible_rect,
        natural_size,
        NULL, 0, false);
  } else {
    base::FilePath file_path = GetTestDataFilePath(filename);
    DemuxResult demux_result = FFmpegDemuxForTest(file_path,
                                                  /*audio*/ false);
damienv1 (2014/10/03 23:33:45): Style issue ?
gunsch (2014/10/04 00:37:40): Done.
    frames = demux_result.frames;
    video_config = demux_result.video_config;
  }

  VideoPipelineDevice *video_pipeline_device =
      media_pipeline_device_->GetVideoPipelineDevice();

  // Set configuration.
  bool success = video_pipeline_device->SetConfig(video_config);
  ASSERT_TRUE(success);

  VLOG(2) << "Got " << frames.size() << " video input frames";

  frames.push_back(
      scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter(
          ::media::DecoderBuffer::CreateEOSBuffer())));

  MediaComponentDeviceFeederForTest *device_feeder =
      new MediaComponentDeviceFeederForTest(
          video_pipeline_device,
          frames,
          base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
                     base::Unretained(this)));

  device_feeder->Initialize();

  component_device_feeders_.push_back(device_feeder);
}

void AudioVideoPipelineDeviceTest::Start() {
  pause_time_ = base::TimeDelta();
  pause_pattern_idx_ = 0;

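  // Kick off feeding on the current message loop, one Feed() task per stream.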
  for (size_t i = 0; i < component_device_feeders_.size(); i++) {
    base::MessageLoopProxy::current()->PostTask(
        FROM_HERE,
        base::Bind(&MediaComponentDeviceFeederForTest::Feed,
                   base::Unretained(component_device_feeders_[i])));
  }

  base::MessageLoopProxy::current()->PostTask(
      FROM_HERE,
      base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
                 base::Unretained(this)));
}

void AudioVideoPipelineDeviceTest::MonitorLoop() {
  // Start the clock if needed.
  media_clock_device_->SetState(MediaClockDevice::kStateRunning);
damienv1 (2014/10/03 23:33:44): Does not seem the right place. Why not make it par…
gunsch (2014/10/04 00:37:40): Done.

  base::TimeDelta media_time = media_clock_device_->GetTime();

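  // Pause once the media clock has advanced past the next scheduled pause point.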
  if (!pause_pattern_.empty() &&
      pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() &&
      media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) {
    // Do Pause
    media_clock_device_->SetRate(0.0);
    pause_time_ = media_clock_device_->GetTime();

    VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " <<
        pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms";

    // Wait for pause finish
    base::MessageLoopProxy::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted,
                   base::Unretained(this)),
        pause_pattern_[pause_pattern_idx_].length);
damienv1 (2014/10/03 23:33:45): nit: Add a return and remove the else.
gunsch (2014/10/04 00:37:40): Done.
  } else {
    // Check state again in a little while
    base::MessageLoopProxy::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
                   base::Unretained(this)),
        kMonitorLoopDelay);
  }
}

void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
  // Make sure the media time didn't move during that time.
  base::TimeDelta media_time = media_clock_device_->GetTime();

  // TODO(damienv):
  // Should be:
  // EXPECT_EQ(media_time, media_time_);
  // However, some backends, when rendering the first frame while in paused
  // mode, move the time forward.
  // This behaviour is not intended.
  EXPECT_GE(media_time, pause_time_);
  EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50));

  pause_time_ = media_time;
  pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size();

  VLOG(2) << "Pause complete, restarting media clock";

  // Resume playback and frame feeding.
  media_clock_device_->SetRate(1.0);

  MonitorLoop();
}

void AudioVideoPipelineDeviceTest::OnEos(
    MediaComponentDeviceFeederForTest *device_feeder) {
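  // Remove the feeder that just reached end-of-stream from the active set.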
  for (ComponentDeviceIterator it = component_device_feeders_.begin();
       it != component_device_feeders_.end();
       ++it) {
    if (*it == device_feeder) {
      component_device_feeders_.erase(it);
      break;
    }
  }
damienv1 (2014/10/03 23:33:44): nit: Add blank line.
gunsch (2014/10/04 00:37:40): Done.
  // Check if all streams finished
  if (component_device_feeders_.empty())
    base::MessageLoop::current()->QuitWhenIdle();
}

void AudioVideoPipelineDeviceTest::Initialize() {
  // Create the media device.
  MediaPipelineDeviceParams params;
  media_pipeline_device_.reset(CreateMediaPipelineDevice(params).release());
  media_clock_device_ = media_pipeline_device_->GetMediaClockDevice();

  // Clock initialization and configuration.
  bool success =
      media_clock_device_->SetState(MediaClockDevice::kStateIdle);
  ASSERT_TRUE(success);
  success = media_clock_device_->ResetTimeline(base::TimeDelta());
  ASSERT_TRUE(success);
  media_clock_device_->SetRate(1.0);
}

TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForAudioOnly("sfx.mp3");
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, VorbisPlayback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForAudioOnly("sfx.ogg");
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, H264Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForVideoOnly("bear.h264", /* raw_h264 */ true);
damienv1 (2014/10/03 23:33:45): Style issue ? /* raw_h264 */ I don't remember havi…
gunsch (2014/10/04 00:37:40): Common, but usually on the other side. Updated all…
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, WebmPlaybackWithPause) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForVideoOnly("bear-640x360.webm", /* raw_h264 */ false);
damienv1 (2014/10/03 23:33:45): Same.
gunsch (2014/10/04 00:37:40): Done.

  // Set up to pause for 100ms every 500ms
  AddPause(base::TimeDelta::FromMilliseconds(500),
damienv1 (2014/10/03 23:33:45): nit: to keep the same style (i.e. group Configure/…
gunsch (2014/10/04 00:37:40): Done.
           base::TimeDelta::FromMilliseconds(100));

  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, Vp8Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForVideoOnly("bear-vp8a.webm", /* raw_h264 */ false);
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForFile("bear-640x360.webm");
  Start();
  message_loop->Run();
}

}  // namespace media
}  // namespace chromecast