OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <vector> | 5 #include <vector> |
6 | 6 |
7 #include "base/basictypes.h" | 7 #include "base/basictypes.h" |
8 #include "base/bind.h" | 8 #include "base/bind.h" |
9 #include "base/command_line.h" | 9 #include "base/command_line.h" |
10 #include "base/files/file_path.h" | 10 #include "base/files/file_path.h" |
11 #include "base/files/memory_mapped_file.h" | 11 #include "base/files/memory_mapped_file.h" |
12 #include "base/logging.h" | 12 #include "base/logging.h" |
13 #include "base/memory/ref_counted.h" | 13 #include "base/memory/ref_counted.h" |
14 #include "base/memory/scoped_ptr.h" | 14 #include "base/memory/scoped_ptr.h" |
15 #include "base/memory/scoped_vector.h" | 15 #include "base/memory/scoped_vector.h" |
16 #include "base/message_loop/message_loop.h" | 16 #include "base/message_loop/message_loop.h" |
17 #include "base/path_service.h" | 17 #include "base/path_service.h" |
18 #include "base/single_thread_task_runner.h" | 18 #include "base/single_thread_task_runner.h" |
19 #include "base/thread_task_runner_handle.h" | 19 #include "base/thread_task_runner_handle.h" |
20 #include "base/threading/thread.h" | 20 #include "base/threading/thread.h" |
21 #include "base/time/time.h" | 21 #include "base/time/time.h" |
22 #include "chromecast/base/task_runner_impl.h" | 22 #include "chromecast/base/task_runner_impl.h" |
| 23 #include "chromecast/media/cma/base/cast_decoder_buffer_impl.h" |
23 #include "chromecast/media/cma/base/decoder_buffer_adapter.h" | 24 #include "chromecast/media/cma/base/decoder_buffer_adapter.h" |
24 #include "chromecast/media/cma/base/decoder_config_adapter.h" | 25 #include "chromecast/media/cma/base/decoder_config_adapter.h" |
25 #include "chromecast/media/cma/test/frame_segmenter_for_test.h" | 26 #include "chromecast/media/cma/test/frame_segmenter_for_test.h" |
26 #include "chromecast/media/cma/test/media_component_device_feeder_for_test.h" | |
27 #include "chromecast/public/cast_media_shlib.h" | 27 #include "chromecast/public/cast_media_shlib.h" |
28 #include "chromecast/public/media/audio_pipeline_device.h" | |
29 #include "chromecast/public/media/cast_decoder_buffer.h" | 28 #include "chromecast/public/media/cast_decoder_buffer.h" |
30 #include "chromecast/public/media/decoder_config.h" | 29 #include "chromecast/public/media/decoder_config.h" |
31 #include "chromecast/public/media/media_clock_device.h" | |
32 #include "chromecast/public/media/media_pipeline_backend.h" | 30 #include "chromecast/public/media/media_pipeline_backend.h" |
33 #include "chromecast/public/media/media_pipeline_device_params.h" | 31 #include "chromecast/public/media/media_pipeline_device_params.h" |
34 #include "chromecast/public/media/video_pipeline_device.h" | |
35 #include "media/base/audio_decoder_config.h" | 32 #include "media/base/audio_decoder_config.h" |
36 #include "media/base/decoder_buffer.h" | 33 #include "media/base/decoder_buffer.h" |
37 #include "media/base/video_decoder_config.h" | 34 #include "media/base/video_decoder_config.h" |
38 #include "testing/gtest/include/gtest/gtest.h" | 35 #include "testing/gtest/include/gtest/gtest.h" |
39 | 36 |
40 namespace chromecast { | 37 namespace chromecast { |
41 namespace media { | 38 namespace media { |
42 | 39 |
43 namespace { | 40 namespace { |
44 | 41 |
45 typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator | |
46 ComponentDeviceIterator; | |
47 | |
48 const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20); | 42 const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20); |
49 | 43 |
50 base::FilePath GetTestDataFilePath(const std::string& name) { | 44 base::FilePath GetTestDataFilePath(const std::string& name) { |
51 base::FilePath file_path; | 45 base::FilePath file_path; |
52 CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path)); | 46 CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path)); |
53 | 47 |
54 file_path = file_path.Append(FILE_PATH_LITERAL("media")) | 48 file_path = file_path.Append(FILE_PATH_LITERAL("media")) |
55 .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data")) | 49 .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data")) |
56 .AppendASCII(name); | 50 .AppendASCII(name); |
57 return file_path; | 51 return file_path; |
58 } | 52 } |
59 | 53 |
60 } // namespace | 54 } // namespace |
61 | 55 |
62 class AudioVideoPipelineDeviceTest : public testing::Test { | 56 class AudioVideoPipelineDeviceTest : public testing::Test, |
| 57 public MediaPipelineBackend::Delegate { |
63 public: | 58 public: |
64 struct PauseInfo { | 59 struct PauseInfo { |
65 PauseInfo() {} | 60 PauseInfo() {} |
66 PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {} | 61 PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {} |
67 ~PauseInfo() {} | 62 ~PauseInfo() {} |
68 | 63 |
69 base::TimeDelta delay; | 64 base::TimeDelta delay; |
70 base::TimeDelta length; | 65 base::TimeDelta length; |
71 }; | 66 }; |
72 | 67 |
(...skipping 16 matching lines...) |
89 // Pattern loops, waiting >= pattern[i].delay against media clock between | 84 // Pattern loops, waiting >= pattern[i].delay against media clock between |
90 // pauses, then pausing for >= pattern[i].length against MessageLoop | 85 // pauses, then pausing for >= pattern[i].length against MessageLoop |
91 // A pause with delay <0 signals to stop the sequence and not loop | 86 // A pause with delay <0 signals to stop the sequence and not loop |
92 void SetPausePattern(const std::vector<PauseInfo> pattern); | 87 void SetPausePattern(const std::vector<PauseInfo> pattern); |
93 | 88 |
94 // Adds a pause to the end of pause pattern | 89 // Adds a pause to the end of pause pattern |
95 void AddPause(base::TimeDelta delay, base::TimeDelta length); | 90 void AddPause(base::TimeDelta delay, base::TimeDelta length); |
96 | 91 |
97 void Start(); | 92 void Start(); |
98 | 93 |
| 94 // MediaPipelineBackend::Delegate implementation: |
| 95 void OnVideoResolutionChanged(MediaPipelineBackend::VideoDecoder* decoder, |
| 96 const Size& size) override {} |
| 97 void OnPushBufferComplete(MediaPipelineBackend::Decoder* decoder, |
| 98 MediaPipelineBackend::BufferStatus status) override; |
| 99 void OnEndOfStream(MediaPipelineBackend::Decoder* decoder) override; |
| 100 void OnDecoderError(MediaPipelineBackend::Decoder* decoder) override; |
| 101 |
99 private: | 102 private: |
100 void Initialize(); | 103 void Initialize(); |
101 | 104 |
102 void LoadAudioStream(const std::string& filename); | 105 void LoadAudioStream(const std::string& filename); |
103 void LoadVideoStream(const std::string& filename, bool raw_h264); | 106 void LoadVideoStream(const std::string& filename, bool raw_h264); |
104 | 107 |
| 108 void FeedAudioBuffer(); |
| 109 void FeedVideoBuffer(); |
| 110 |
105 void MonitorLoop(); | 111 void MonitorLoop(); |
106 | 112 |
107 void OnPauseCompleted(); | 113 void OnPauseCompleted(); |
108 | 114 |
109 void OnEos(MediaComponentDeviceFeederForTest* device_feeder); | |
110 | |
111 scoped_ptr<TaskRunnerImpl> task_runner_; | 115 scoped_ptr<TaskRunnerImpl> task_runner_; |
112 scoped_ptr<MediaPipelineBackend> backend_; | 116 scoped_ptr<MediaPipelineBackend> backend_; |
113 MediaClockDevice* media_clock_device_; | 117 CastDecoderBufferImpl backend_audio_buffer_; |
114 | 118 CastDecoderBufferImpl backend_video_buffer_; |
115 // Devices to feed | |
116 ScopedVector<MediaComponentDeviceFeederForTest> | |
117 component_device_feeders_; | |
118 | 119 |
119 // Current media time. | 120 // Current media time. |
120 base::TimeDelta pause_time_; | 121 base::TimeDelta pause_time_; |
121 | 122 |
122 // Pause settings | 123 // Pause settings |
123 std::vector<PauseInfo> pause_pattern_; | 124 std::vector<PauseInfo> pause_pattern_; |
124 int pause_pattern_idx_; | 125 int pause_pattern_idx_; |
125 | 126 |
| 127 BufferList audio_buffers_; |
| 128 BufferList video_buffers_; |
| 129 |
| 130 MediaPipelineBackend::AudioDecoder* audio_decoder_; |
| 131 MediaPipelineBackend::VideoDecoder* video_decoder_; |
| 132 bool audio_feeding_completed_; |
| 133 bool video_feeding_completed_; |
| 134 |
126 DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest); | 135 DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest); |
127 }; | 136 }; |
128 | 137 |
129 AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest() | 138 AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest() |
130 : pause_pattern_() { | 139 : backend_audio_buffer_(nullptr), |
| 140 backend_video_buffer_(nullptr), |
| 141 pause_pattern_(), |
| 142 audio_decoder_(nullptr), |
| 143 video_decoder_(nullptr), |
| 144 audio_feeding_completed_(true), |
| 145 video_feeding_completed_(true) { |
131 } | 146 } |
132 | 147 |
133 AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() { | 148 AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() { |
134 } | 149 } |
135 | 150 |
136 void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay, | 151 void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay, |
137 base::TimeDelta length) { | 152 base::TimeDelta length) { |
138 pause_pattern_.push_back(PauseInfo(delay, length)); | 153 pause_pattern_.push_back(PauseInfo(delay, length)); |
139 } | 154 } |
140 | 155 |
141 void AudioVideoPipelineDeviceTest::SetPausePattern( | 156 void AudioVideoPipelineDeviceTest::SetPausePattern( |
142 const std::vector<PauseInfo> pattern) { | 157 const std::vector<PauseInfo> pattern) { |
143 pause_pattern_ = pattern; | 158 pause_pattern_ = pattern; |
144 } | 159 } |
145 | 160 |
146 void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly( | 161 void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly( |
147 const std::string& filename) { | 162 const std::string& filename) { |
148 Initialize(); | 163 Initialize(); |
149 LoadAudioStream(filename); | 164 LoadAudioStream(filename); |
| 165 bool success = backend_->Initialize(this); |
| 166 ASSERT_TRUE(success); |
150 } | 167 } |
151 | 168 |
152 void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly( | 169 void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly( |
153 const std::string& filename, | 170 const std::string& filename, |
154 bool raw_h264) { | 171 bool raw_h264) { |
155 Initialize(); | 172 Initialize(); |
156 LoadVideoStream(filename, raw_h264); | 173 LoadVideoStream(filename, raw_h264); |
| 174 bool success = backend_->Initialize(this); |
| 175 ASSERT_TRUE(success); |
157 } | 176 } |
158 | 177 |
159 void AudioVideoPipelineDeviceTest::ConfigureForFile( | 178 void AudioVideoPipelineDeviceTest::ConfigureForFile( |
160 const std::string& filename) { | 179 const std::string& filename) { |
161 Initialize(); | 180 Initialize(); |
162 LoadVideoStream(filename, false /* raw_h264 */); | 181 LoadVideoStream(filename, false /* raw_h264 */); |
163 LoadAudioStream(filename); | 182 LoadAudioStream(filename); |
| 183 bool success = backend_->Initialize(this); |
| 184 ASSERT_TRUE(success); |
164 } | 185 } |
165 | 186 |
166 void AudioVideoPipelineDeviceTest::LoadAudioStream( | 187 void AudioVideoPipelineDeviceTest::LoadAudioStream( |
167 const std::string& filename) { | 188 const std::string& filename) { |
168 base::FilePath file_path = GetTestDataFilePath(filename); | 189 base::FilePath file_path = GetTestDataFilePath(filename); |
169 DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */); | 190 DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */); |
170 BufferList frames = demux_result.frames; | |
171 | 191 |
172 AudioPipelineDevice* audio_pipeline_device = backend_->GetAudio(); | 192 audio_buffers_ = demux_result.frames; |
| 193 audio_decoder_ = backend_->CreateAudioDecoder(); |
| 194 audio_feeding_completed_ = false; |
173 | 195 |
174 bool success = audio_pipeline_device->SetConfig( | 196 bool success = |
175 DecoderConfigAdapter::ToCastAudioConfig(kPrimary, | 197 audio_decoder_->SetConfig(DecoderConfigAdapter::ToCastAudioConfig( |
176 demux_result.audio_config)); | 198 kPrimary, demux_result.audio_config)); |
177 ASSERT_TRUE(success); | 199 ASSERT_TRUE(success); |
178 | 200 |
179 VLOG(2) << "Got " << frames.size() << " audio input frames"; | 201 VLOG(2) << "Got " << audio_buffers_.size() << " audio input frames"; |
180 | 202 |
181 frames.push_back( | 203 audio_buffers_.push_back(scoped_refptr<DecoderBufferBase>( |
182 scoped_refptr<DecoderBufferBase>( | 204 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer()))); |
183 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer()))); | |
184 | |
185 MediaComponentDeviceFeederForTest* device_feeder = | |
186 new MediaComponentDeviceFeederForTest(audio_pipeline_device, frames); | |
187 device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos, | |
188 base::Unretained(this), | |
189 device_feeder)); | |
190 component_device_feeders_.push_back(device_feeder); | |
191 } | 205 } |
192 | 206 |
193 void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename, | 207 void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename, |
194 bool raw_h264) { | 208 bool raw_h264) { |
195 BufferList frames; | |
196 VideoConfig video_config; | 209 VideoConfig video_config; |
197 | 210 |
198 if (raw_h264) { | 211 if (raw_h264) { |
199 base::FilePath file_path = GetTestDataFilePath(filename); | 212 base::FilePath file_path = GetTestDataFilePath(filename); |
200 base::MemoryMappedFile video_stream; | 213 base::MemoryMappedFile video_stream; |
201 ASSERT_TRUE(video_stream.Initialize(file_path)) | 214 ASSERT_TRUE(video_stream.Initialize(file_path)) |
202 << "Couldn't open stream file: " << file_path.MaybeAsASCII(); | 215 << "Couldn't open stream file: " << file_path.MaybeAsASCII(); |
203 frames = H264SegmenterForTest(video_stream.data(), video_stream.length()); | 216 video_buffers_ = |
| 217 H264SegmenterForTest(video_stream.data(), video_stream.length()); |
204 | 218 |
205 // TODO(erickung): Either pull data from stream or make caller specify value | 219 // TODO(erickung): Either pull data from stream or make caller specify value |
206 video_config.codec = kCodecH264; | 220 video_config.codec = kCodecH264; |
207 video_config.profile = kH264Main; | 221 video_config.profile = kH264Main; |
208 video_config.additional_config = NULL; | 222 video_config.additional_config = NULL; |
209 video_config.is_encrypted = false; | 223 video_config.is_encrypted = false; |
210 } else { | 224 } else { |
211 base::FilePath file_path = GetTestDataFilePath(filename); | 225 base::FilePath file_path = GetTestDataFilePath(filename); |
212 DemuxResult demux_result = FFmpegDemuxForTest(file_path, | 226 DemuxResult demux_result = FFmpegDemuxForTest(file_path, |
213 /*audio*/ false); | 227 /*audio*/ false); |
214 frames = demux_result.frames; | 228 video_buffers_ = demux_result.frames; |
215 video_config = DecoderConfigAdapter::ToCastVideoConfig( | 229 video_config = DecoderConfigAdapter::ToCastVideoConfig( |
216 kPrimary, demux_result.video_config); | 230 kPrimary, demux_result.video_config); |
217 } | 231 } |
218 | 232 |
219 VideoPipelineDevice* video_pipeline_device = backend_->GetVideo(); | 233 video_decoder_ = backend_->CreateVideoDecoder(); |
220 | 234 video_feeding_completed_ = false; |
221 // Set configuration. | 235 bool success = video_decoder_->SetConfig(video_config); |
222 bool success = video_pipeline_device->SetConfig(video_config); | |
223 ASSERT_TRUE(success); | 236 ASSERT_TRUE(success); |
224 | 237 |
225 VLOG(2) << "Got " << frames.size() << " video input frames"; | 238 VLOG(2) << "Got " << video_buffers_.size() << " video input frames"; |
226 | 239 |
227 frames.push_back( | 240 video_buffers_.push_back(scoped_refptr<DecoderBufferBase>( |
228 scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter( | 241 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer()))); |
229 ::media::DecoderBuffer::CreateEOSBuffer()))); | 242 } |
230 | 243 |
231 MediaComponentDeviceFeederForTest* device_feeder = | 244 void AudioVideoPipelineDeviceTest::FeedAudioBuffer() { |
232 new MediaComponentDeviceFeederForTest(video_pipeline_device, frames); | 245 // Possibly feed one frame |
233 device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos, | 246 DCHECK(!audio_buffers_.empty()); |
234 base::Unretained(this), | 247 if (audio_feeding_completed_) |
235 device_feeder)); | 248 return; |
236 component_device_feeders_.push_back(device_feeder); | 249 |
| 250 scoped_refptr<DecoderBufferBase> buffer = audio_buffers_.front(); |
| 251 backend_audio_buffer_.set_buffer(buffer); |
| 252 |
| 253 MediaPipelineBackend::BufferStatus status = |
| 254 audio_decoder_->PushBuffer(nullptr, // decrypt_context |
| 255 &backend_audio_buffer_); |
| 256 EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); |
| 257 audio_buffers_.pop_front(); |
| 258 |
| 259 // Feeding is done, just wait for the end of stream callback. |
| 260 if (buffer->end_of_stream() || audio_buffers_.empty()) { |
| 261 if (audio_buffers_.empty() && !buffer->end_of_stream()) { |
| 262 LOG(WARNING) << "Stream emptied without feeding EOS frame"; |
| 263 } |
| 264 |
| 265 audio_feeding_completed_ = true; |
| 266 return; |
| 267 } |
| 268 |
| 269 if (status == MediaPipelineBackend::kBufferPending) |
| 270 return; |
| 271 |
| 272 OnPushBufferComplete(audio_decoder_, MediaPipelineBackend::kBufferSuccess); |
| 273 } |
| 274 |
| 275 void AudioVideoPipelineDeviceTest::FeedVideoBuffer() { |
| 276 // Possibly feed one frame |
| 277 DCHECK(!video_buffers_.empty()); |
| 278 if (video_feeding_completed_) |
| 279 return; |
| 280 |
| 281 scoped_refptr<DecoderBufferBase> buffer = video_buffers_.front(); |
| 282 backend_video_buffer_.set_buffer(buffer); |
| 283 |
| 284 MediaPipelineBackend::BufferStatus status = |
| 285 video_decoder_->PushBuffer(nullptr, // decrypt_context |
| 286 &backend_video_buffer_); |
| 287 EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); |
| 288 video_buffers_.pop_front(); |
| 289 |
| 290 // Feeding is done, just wait for the end of stream callback. |
| 291 if (buffer->end_of_stream() || video_buffers_.empty()) { |
| 292 if (video_buffers_.empty() && !buffer->end_of_stream()) { |
| 293 LOG(WARNING) << "Stream emptied without feeding EOS frame"; |
| 294 } |
| 295 |
| 296 video_feeding_completed_ = true; |
| 297 return; |
| 298 } |
| 299 |
| 300 if (status == MediaPipelineBackend::kBufferPending) |
| 301 return; |
| 302 |
| 303 OnPushBufferComplete(video_decoder_, MediaPipelineBackend::kBufferSuccess); |
237 } | 304 } |
238 | 305 |
239 void AudioVideoPipelineDeviceTest::Start() { | 306 void AudioVideoPipelineDeviceTest::Start() { |
240 pause_time_ = base::TimeDelta(); | 307 pause_time_ = base::TimeDelta(); |
241 pause_pattern_idx_ = 0; | 308 pause_pattern_idx_ = 0; |
242 | 309 |
243 for (size_t i = 0; i < component_device_feeders_.size(); i++) { | 310 if (audio_decoder_) { |
244 base::ThreadTaskRunnerHandle::Get()->PostTask( | 311 base::ThreadTaskRunnerHandle::Get()->PostTask( |
245 FROM_HERE, base::Bind(&MediaComponentDeviceFeederForTest::Feed, | 312 FROM_HERE, |
246 base::Unretained(component_device_feeders_[i]))); | 313 base::Bind(&AudioVideoPipelineDeviceTest::FeedAudioBuffer, |
| 314 base::Unretained(this))); |
| 315 } |
| 316 if (video_decoder_) { |
| 317 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 318 FROM_HERE, |
| 319 base::Bind(&AudioVideoPipelineDeviceTest::FeedVideoBuffer, |
| 320 base::Unretained(this))); |
247 } | 321 } |
248 | 322 |
249 media_clock_device_->SetState(MediaClockDevice::kStateRunning); | 323 backend_->Start(0); |
250 | 324 |
251 base::ThreadTaskRunnerHandle::Get()->PostTask( | 325 base::ThreadTaskRunnerHandle::Get()->PostTask( |
252 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, | 326 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, |
253 base::Unretained(this))); | 327 base::Unretained(this))); |
254 } | 328 } |
255 | 329 |
| 330 void AudioVideoPipelineDeviceTest::OnEndOfStream( |
| 331 MediaPipelineBackend::Decoder* decoder) { |
| 332 bool success = backend_->Stop(); |
| 333 ASSERT_TRUE(success); |
| 334 |
| 335 if (decoder == audio_decoder_) |
| 336 audio_decoder_ = nullptr; |
| 337 else if (decoder == video_decoder_) |
| 338 video_decoder_ = nullptr; |
| 339 |
| 340 if (!audio_decoder_ && !video_decoder_) |
| 341 base::MessageLoop::current()->QuitWhenIdle(); |
| 342 } |
| 343 |
| 344 void AudioVideoPipelineDeviceTest::OnDecoderError( |
| 345 MediaPipelineBackend::Decoder* decoder) { |
| 346 ASSERT_TRUE(false); |
| 347 } |
| 348 |
| 349 void AudioVideoPipelineDeviceTest::OnPushBufferComplete( |
| 350 MediaPipelineBackend::Decoder* decoder, |
| 351 MediaPipelineBackend::BufferStatus status) { |
| 352 EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); |
| 353 |
| 354 if (decoder == audio_decoder_) { |
| 355 if (audio_feeding_completed_) |
| 356 return; |
| 357 |
| 358 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 359 FROM_HERE, |
| 360 base::Bind(&AudioVideoPipelineDeviceTest::FeedAudioBuffer, |
| 361 base::Unretained(this))); |
| 362 } else if (decoder == video_decoder_) { |
| 363 if (video_feeding_completed_) |
| 364 return; |
| 365 |
| 366 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 367 FROM_HERE, |
| 368 base::Bind(&AudioVideoPipelineDeviceTest::FeedVideoBuffer, |
| 369 base::Unretained(this))); |
| 370 } |
| 371 } |
| 372 |
256 void AudioVideoPipelineDeviceTest::MonitorLoop() { | 373 void AudioVideoPipelineDeviceTest::MonitorLoop() { |
257 base::TimeDelta media_time = base::TimeDelta::FromMicroseconds( | 374 base::TimeDelta media_time = |
258 media_clock_device_->GetTimeMicroseconds()); | 375 base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); |
259 | 376 |
260 if (!pause_pattern_.empty() && | 377 if (!pause_pattern_.empty() && |
261 pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() && | 378 pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() && |
262 media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) { | 379 media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) { |
263 // Do Pause | 380 // Do Pause |
264 media_clock_device_->SetRate(0.0); | 381 backend_->Pause(); |
265 pause_time_ = base::TimeDelta::FromMicroseconds( | 382 pause_time_ = base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); |
266 media_clock_device_->GetTimeMicroseconds()); | |
267 | 383 |
268 VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " << | 384 VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " << |
269 pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms"; | 385 pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms"; |
270 | 386 |
271 // Wait for pause finish | 387 // Wait for pause finish |
272 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( | 388 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( |
273 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted, | 389 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted, |
274 base::Unretained(this)), | 390 base::Unretained(this)), |
275 pause_pattern_[pause_pattern_idx_].length); | 391 pause_pattern_[pause_pattern_idx_].length); |
276 return; | 392 return; |
277 } | 393 } |
278 | 394 |
279 // Check state again in a little while | 395 // Check state again in a little while |
280 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( | 396 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( |
281 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, | 397 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, |
282 base::Unretained(this)), | 398 base::Unretained(this)), |
283 kMonitorLoopDelay); | 399 kMonitorLoopDelay); |
284 } | 400 } |
285 | 401 |
286 void AudioVideoPipelineDeviceTest::OnPauseCompleted() { | 402 void AudioVideoPipelineDeviceTest::OnPauseCompleted() { |
287 // Make sure the media time didn't move during that time. | 403 // Make sure the media time didn't move during that time. |
288 base::TimeDelta media_time = base::TimeDelta::FromMicroseconds( | 404 base::TimeDelta media_time = |
289 media_clock_device_->GetTimeMicroseconds()); | 405 base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); |
290 | 406 |
291 // TODO(damienv): | 407 // TODO(damienv): |
292 // Should be: | 408 // Should be: |
293 // EXPECT_EQ(media_time, media_time_); | 409 // EXPECT_EQ(media_time, media_time_); |
294 // However, some backends, when rendering the first frame while in paused | 410 // However, some backends, when rendering the first frame while in paused |
295 // mode, move the time forward. | 411 // mode, move the time forward. |
296 // This behaviour is not intended. | 412 // This behaviour is not intended. |
297 EXPECT_GE(media_time, pause_time_); | 413 EXPECT_GE(media_time, pause_time_); |
298 EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50)); | 414 EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50)); |
299 | 415 |
300 pause_time_ = media_time; | 416 pause_time_ = media_time; |
301 pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size(); | 417 pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size(); |
302 | 418 |
303 VLOG(2) << "Pause complete, restarting media clock"; | 419 VLOG(2) << "Pause complete, restarting media clock"; |
304 | 420 |
305 // Resume playback and frame feeding. | 421 // Resume playback and frame feeding. |
306 media_clock_device_->SetRate(1.0); | 422 backend_->Resume(); |
307 | 423 |
308 MonitorLoop(); | 424 MonitorLoop(); |
309 } | 425 } |
310 | 426 |
311 void AudioVideoPipelineDeviceTest::OnEos( | |
312 MediaComponentDeviceFeederForTest* device_feeder) { | |
313 for (ComponentDeviceIterator it = component_device_feeders_.begin(); | |
314 it != component_device_feeders_.end(); | |
315 ++it) { | |
316 if (*it == device_feeder) { | |
317 component_device_feeders_.erase(it); | |
318 break; | |
319 } | |
320 } | |
321 | |
322 // Check if all streams finished | |
323 if (component_device_feeders_.empty()) | |
324 base::MessageLoop::current()->QuitWhenIdle(); | |
325 } | |
326 | |
327 void AudioVideoPipelineDeviceTest::Initialize() { | 427 void AudioVideoPipelineDeviceTest::Initialize() { |
328 // Create the media device. | 428 // Create the media device. |
329 task_runner_.reset(new TaskRunnerImpl()); | 429 task_runner_.reset(new TaskRunnerImpl()); |
330 MediaPipelineDeviceParams params(task_runner_.get()); | 430 MediaPipelineDeviceParams params(task_runner_.get()); |
331 backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params)); | 431 backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params)); |
332 media_clock_device_ = backend_->GetClock(); | |
333 | |
334 // Clock initialization and configuration. | |
335 bool success = | |
336 media_clock_device_->SetState(MediaClockDevice::kStateIdle); | |
337 ASSERT_TRUE(success); | |
338 success = media_clock_device_->ResetTimeline(0); | |
339 ASSERT_TRUE(success); | |
340 media_clock_device_->SetRate(1.0); | |
341 } | 432 } |
342 | 433 |
343 TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) { | 434 TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) { |
344 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); | 435 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); |
345 | 436 |
346 ConfigureForAudioOnly("sfx.mp3"); | 437 ConfigureForAudioOnly("sfx.mp3"); |
347 Start(); | 438 Start(); |
348 message_loop->Run(); | 439 message_loop->Run(); |
349 } | 440 } |
350 | 441 |
(...skipping 36 matching lines...) |
387 TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) { | 478 TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) { |
388 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); | 479 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); |
389 | 480 |
390 ConfigureForFile("bear-640x360.webm"); | 481 ConfigureForFile("bear-640x360.webm"); |
391 Start(); | 482 Start(); |
392 message_loop->Run(); | 483 message_loop->Run(); |
393 } | 484 } |
394 | 485 |
395 } // namespace media | 486 } // namespace media |
396 } // namespace chromecast | 487 } // namespace chromecast |
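
The visible tests only exercise straight playback; the AddPause()/SetPausePattern() hooks declared on the fixture suggest pause-driven variants inside the collapsed hunk above. As a rough sketch only (the test name and timing values below are assumptions, not taken from the file), such a test against the new MediaPipelineBackend-based fixture would follow the same shape as Mp3Playback:

TEST_F(AudioVideoPipelineDeviceTest, Mp3PlaybackWithPause) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForAudioOnly("sfx.mp3");

  // Hypothetical timings: after ~1s of playback measured on the media clock,
  // pause the backend for ~500ms of wall time, then repeat until end of
  // stream (the single entry loops via pause_pattern_idx_).
  AddPause(base::TimeDelta::FromSeconds(1),
           base::TimeDelta::FromMilliseconds(500));

  Start();
  message_loop->Run();
}

MonitorLoop() drives the pauses by polling backend_->GetCurrentPts(), so the delay is honored against the media clock while the pause length is realized through a delayed MessageLoop task, matching the comment on SetPausePattern().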