OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <vector> | 5 #include <vector> |
6 | 6 |
7 #include "base/basictypes.h" | 7 #include "base/basictypes.h" |
8 #include "base/bind.h" | 8 #include "base/bind.h" |
9 #include "base/command_line.h" | 9 #include "base/command_line.h" |
10 #include "base/files/file_path.h" | 10 #include "base/files/file_path.h" |
11 #include "base/files/memory_mapped_file.h" | 11 #include "base/files/memory_mapped_file.h" |
12 #include "base/logging.h" | 12 #include "base/logging.h" |
13 #include "base/memory/ref_counted.h" | 13 #include "base/memory/ref_counted.h" |
14 #include "base/memory/scoped_ptr.h" | 14 #include "base/memory/scoped_ptr.h" |
15 #include "base/memory/scoped_vector.h" | 15 #include "base/memory/scoped_vector.h" |
16 #include "base/message_loop/message_loop.h" | 16 #include "base/message_loop/message_loop.h" |
17 #include "base/path_service.h" | 17 #include "base/path_service.h" |
18 #include "base/single_thread_task_runner.h" | 18 #include "base/single_thread_task_runner.h" |
19 #include "base/thread_task_runner_handle.h" | 19 #include "base/thread_task_runner_handle.h" |
20 #include "base/threading/thread.h" | 20 #include "base/threading/thread.h" |
21 #include "base/time/time.h" | 21 #include "base/time/time.h" |
22 #include "chromecast/base/task_runner_impl.h" | 22 #include "chromecast/base/task_runner_impl.h" |
| 23 #include "chromecast/media/cma/base/cast_decoder_buffer_impl.h" |
23 #include "chromecast/media/cma/base/decoder_buffer_adapter.h" | 24 #include "chromecast/media/cma/base/decoder_buffer_adapter.h" |
24 #include "chromecast/media/cma/base/decoder_config_adapter.h" | 25 #include "chromecast/media/cma/base/decoder_config_adapter.h" |
25 #include "chromecast/media/cma/test/frame_segmenter_for_test.h" | 26 #include "chromecast/media/cma/test/frame_segmenter_for_test.h" |
26 #include "chromecast/media/cma/test/media_component_device_feeder_for_test.h" | |
27 #include "chromecast/public/cast_media_shlib.h" | 27 #include "chromecast/public/cast_media_shlib.h" |
28 #include "chromecast/public/media/audio_pipeline_device.h" | |
29 #include "chromecast/public/media/cast_decoder_buffer.h" | 28 #include "chromecast/public/media/cast_decoder_buffer.h" |
30 #include "chromecast/public/media/decoder_config.h" | 29 #include "chromecast/public/media/decoder_config.h" |
31 #include "chromecast/public/media/media_clock_device.h" | |
32 #include "chromecast/public/media/media_pipeline_backend.h" | 30 #include "chromecast/public/media/media_pipeline_backend.h" |
33 #include "chromecast/public/media/media_pipeline_device_params.h" | 31 #include "chromecast/public/media/media_pipeline_device_params.h" |
34 #include "chromecast/public/media/video_pipeline_device.h" | |
35 #include "media/base/audio_decoder_config.h" | 32 #include "media/base/audio_decoder_config.h" |
36 #include "media/base/decoder_buffer.h" | 33 #include "media/base/decoder_buffer.h" |
37 #include "media/base/video_decoder_config.h" | 34 #include "media/base/video_decoder_config.h" |
38 #include "testing/gtest/include/gtest/gtest.h" | 35 #include "testing/gtest/include/gtest/gtest.h" |
39 | 36 |
40 namespace chromecast { | 37 namespace chromecast { |
41 namespace media { | 38 namespace media { |
42 | 39 |
43 namespace { | 40 namespace { |
44 | 41 |
45 typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator | |
46 ComponentDeviceIterator; | |
47 | |
48 const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20); | 42 const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20); |
49 | 43 |
50 base::FilePath GetTestDataFilePath(const std::string& name) { | 44 base::FilePath GetTestDataFilePath(const std::string& name) { |
51 base::FilePath file_path; | 45 base::FilePath file_path; |
52 CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path)); | 46 CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path)); |
53 | 47 |
54 file_path = file_path.Append(FILE_PATH_LITERAL("media")) | 48 file_path = file_path.Append(FILE_PATH_LITERAL("media")) |
55 .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data")) | 49 .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data")) |
56 .AppendASCII(name); | 50 .AppendASCII(name); |
57 return file_path; | 51 return file_path; |
58 } | 52 } |
59 | 53 |
60 } // namespace | 54 } // namespace |
61 | 55 |
62 class AudioVideoPipelineDeviceTest : public testing::Test { | 56 class AudioVideoPipelineDeviceTest : public testing::Test, |
| 57 public MediaPipelineBackend::Delegate { |
63 public: | 58 public: |
64 struct PauseInfo { | 59 struct PauseInfo { |
65 PauseInfo() {} | 60 PauseInfo() {} |
66 PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {} | 61 PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {} |
67 ~PauseInfo() {} | 62 ~PauseInfo() {} |
68 | 63 |
69 base::TimeDelta delay; | 64 base::TimeDelta delay; |
70 base::TimeDelta length; | 65 base::TimeDelta length; |
71 }; | 66 }; |
72 | 67 |
(...skipping 16 matching lines...) |
89 // Pattern loops, waiting >= pattern[i].delay against media clock between | 84 // Pattern loops, waiting >= pattern[i].delay against media clock between |
90 // pauses, then pausing for >= pattern[i].length against MessageLoop | 85 // pauses, then pausing for >= pattern[i].length against MessageLoop |
91 // A pause with delay < 0 signals to stop the sequence and not loop | 86 // A pause with delay < 0 signals to stop the sequence and not loop |
92 void SetPausePattern(const std::vector<PauseInfo> pattern); | 87 void SetPausePattern(const std::vector<PauseInfo> pattern); |
93 | 88 |
94 // Adds a pause to the end of the pause pattern | 89 // Adds a pause to the end of the pause pattern |
95 void AddPause(base::TimeDelta delay, base::TimeDelta length); | 90 void AddPause(base::TimeDelta delay, base::TimeDelta length); |
96 | 91 |
97 void Start(); | 92 void Start(); |
98 | 93 |
| 94 // MediaPipelineBackend::Delegate implementation: |
| 95 void OnVideoResolutionChanged(MediaPipelineBackend::Decoder* decoder, |
| 96 const Size& size) override {} |
| 97 void OnPushBufferComplete(MediaPipelineBackend::Decoder* decoder, |
| 98 MediaPipelineBackend::BufferStatus status) override; |
| 99 void OnEndOfStream(MediaPipelineBackend::Decoder* decoder) override; |
| 100 |
99 private: | 101 private: |
100 void Initialize(); | 102 void Initialize(); |
101 | 103 |
102 void LoadAudioStream(const std::string& filename); | 104 void LoadAudioStream(const std::string& filename); |
103 void LoadVideoStream(const std::string& filename, bool raw_h264); | 105 void LoadVideoStream(const std::string& filename, bool raw_h264); |
104 | 106 |
| 107 void FeedAudioBuffer(); |
| 108 void FeedVideoBuffer(); |
| 109 |
105 void MonitorLoop(); | 110 void MonitorLoop(); |
106 | 111 |
107 void OnPauseCompleted(); | 112 void OnPauseCompleted(); |
108 | 113 |
109 void OnEos(MediaComponentDeviceFeederForTest* device_feeder); | |
110 | |
111 scoped_ptr<TaskRunnerImpl> task_runner_; | 114 scoped_ptr<TaskRunnerImpl> task_runner_; |
112 scoped_ptr<MediaPipelineBackend> backend_; | 115 scoped_ptr<MediaPipelineBackend> backend_; |
113 MediaClockDevice* media_clock_device_; | 116 scoped_ptr<CastDecoderBufferImpl> backend_audio_buffer_; |
114 | 117 scoped_ptr<CastDecoderBufferImpl> backend_video_buffer_; |
115 // Devices to feed | |
116 ScopedVector<MediaComponentDeviceFeederForTest> | |
117 component_device_feeders_; | |
118 | 118 |
119 // Current media time. | 119 // Current media time. |
120 base::TimeDelta pause_time_; | 120 base::TimeDelta pause_time_; |
121 | 121 |
122 // Pause settings | 122 // Pause settings |
123 std::vector<PauseInfo> pause_pattern_; | 123 std::vector<PauseInfo> pause_pattern_; |
124 int pause_pattern_idx_; | 124 int pause_pattern_idx_; |
125 | 125 |
| 126 BufferList audio_buffers_; |
| 127 BufferList video_buffers_; |
| 128 |
| 129 MediaPipelineBackend::AudioDecoder* audio_decoder_; |
| 130 MediaPipelineBackend::VideoDecoder* video_decoder_; |
| 131 bool audio_feeding_completed_; |
| 132 bool video_feeding_completed_; |
| 133 |
126 DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest); | 134 DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest); |
127 }; | 135 }; |
128 | 136 |
129 AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest() | 137 AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest() |
130 : pause_pattern_() { | 138 : pause_pattern_(), |
| 139 audio_decoder_(nullptr), |
| 140 video_decoder_(nullptr), |
| 141 audio_feeding_completed_(true), |
| 142 video_feeding_completed_(true) { |
131 } | 143 } |
132 | 144 |
133 AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() { | 145 AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() { |
134 } | 146 } |
135 | 147 |
136 void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay, | 148 void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay, |
137 base::TimeDelta length) { | 149 base::TimeDelta length) { |
138 pause_pattern_.push_back(PauseInfo(delay, length)); | 150 pause_pattern_.push_back(PauseInfo(delay, length)); |
139 } | 151 } |
140 | 152 |
141 void AudioVideoPipelineDeviceTest::SetPausePattern( | 153 void AudioVideoPipelineDeviceTest::SetPausePattern( |
142 const std::vector<PauseInfo> pattern) { | 154 const std::vector<PauseInfo> pattern) { |
143 pause_pattern_ = pattern; | 155 pause_pattern_ = pattern; |
144 } | 156 } |
145 | 157 |
146 void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly( | 158 void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly( |
147 const std::string& filename) { | 159 const std::string& filename) { |
148 Initialize(); | 160 Initialize(); |
149 LoadAudioStream(filename); | 161 LoadAudioStream(filename); |
| 162 bool success = backend_->Initialize(this); |
| 163 ASSERT_TRUE(success); |
150 } | 164 } |
151 | 165 |
152 void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly( | 166 void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly( |
153 const std::string& filename, | 167 const std::string& filename, |
154 bool raw_h264) { | 168 bool raw_h264) { |
155 Initialize(); | 169 Initialize(); |
156 LoadVideoStream(filename, raw_h264); | 170 LoadVideoStream(filename, raw_h264); |
| 171 bool success = backend_->Initialize(this); |
| 172 ASSERT_TRUE(success); |
157 } | 173 } |
158 | 174 |
159 void AudioVideoPipelineDeviceTest::ConfigureForFile( | 175 void AudioVideoPipelineDeviceTest::ConfigureForFile( |
160 const std::string& filename) { | 176 const std::string& filename) { |
161 Initialize(); | 177 Initialize(); |
162 LoadVideoStream(filename, false /* raw_h264 */); | 178 LoadVideoStream(filename, false /* raw_h264 */); |
163 LoadAudioStream(filename); | 179 LoadAudioStream(filename); |
| 180 bool success = backend_->Initialize(this); |
| 181 ASSERT_TRUE(success); |
164 } | 182 } |
165 | 183 |
166 void AudioVideoPipelineDeviceTest::LoadAudioStream( | 184 void AudioVideoPipelineDeviceTest::LoadAudioStream( |
167 const std::string& filename) { | 185 const std::string& filename) { |
168 base::FilePath file_path = GetTestDataFilePath(filename); | 186 base::FilePath file_path = GetTestDataFilePath(filename); |
169 DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */); | 187 DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */); |
170 BufferList frames = demux_result.frames; | |
171 | 188 |
172 AudioPipelineDevice* audio_pipeline_device = backend_->GetAudio(); | 189 audio_buffers_ = demux_result.frames; |
| 190 audio_decoder_ = backend_->CreateAudioDecoder(); |
| 191 audio_feeding_completed_ = false; |
173 | 192 |
174 bool success = audio_pipeline_device->SetConfig( | 193 bool success = |
175 DecoderConfigAdapter::ToCastAudioConfig(kPrimary, | 194 audio_decoder_->SetConfig(DecoderConfigAdapter::ToCastAudioConfig( |
176 demux_result.audio_config)); | 195 kPrimary, demux_result.audio_config)); |
177 ASSERT_TRUE(success); | 196 ASSERT_TRUE(success); |
178 | 197 |
179 VLOG(2) << "Got " << frames.size() << " audio input frames"; | 198 VLOG(2) << "Got " << audio_buffers_.size() << " audio input frames"; |
180 | 199 |
181 frames.push_back( | 200 audio_buffers_.push_back(scoped_refptr<DecoderBufferBase>( |
182 scoped_refptr<DecoderBufferBase>( | 201 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer()))); |
183 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer()))); | |
184 | |
185 MediaComponentDeviceFeederForTest* device_feeder = | |
186 new MediaComponentDeviceFeederForTest(audio_pipeline_device, frames); | |
187 device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos, | |
188 base::Unretained(this), | |
189 device_feeder)); | |
190 component_device_feeders_.push_back(device_feeder); | |
191 } | 202 } |
192 | 203 |
193 void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename, | 204 void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename, |
194 bool raw_h264) { | 205 bool raw_h264) { |
195 BufferList frames; | |
196 VideoConfig video_config; | 206 VideoConfig video_config; |
197 | 207 |
198 if (raw_h264) { | 208 if (raw_h264) { |
199 base::FilePath file_path = GetTestDataFilePath(filename); | 209 base::FilePath file_path = GetTestDataFilePath(filename); |
200 base::MemoryMappedFile video_stream; | 210 base::MemoryMappedFile video_stream; |
201 ASSERT_TRUE(video_stream.Initialize(file_path)) | 211 ASSERT_TRUE(video_stream.Initialize(file_path)) |
202 << "Couldn't open stream file: " << file_path.MaybeAsASCII(); | 212 << "Couldn't open stream file: " << file_path.MaybeAsASCII(); |
203 frames = H264SegmenterForTest(video_stream.data(), video_stream.length()); | 213 video_buffers_ = |
| 214 H264SegmenterForTest(video_stream.data(), video_stream.length()); |
204 | 215 |
205 // TODO(erickung): Either pull data from stream or make caller specify value | 216 // TODO(erickung): Either pull data from stream or make caller specify value |
206 video_config.codec = kCodecH264; | 217 video_config.codec = kCodecH264; |
207 video_config.profile = kH264Main; | 218 video_config.profile = kH264Main; |
208 video_config.additional_config = NULL; | 219 video_config.additional_config = NULL; |
209 video_config.is_encrypted = false; | 220 video_config.is_encrypted = false; |
210 } else { | 221 } else { |
211 base::FilePath file_path = GetTestDataFilePath(filename); | 222 base::FilePath file_path = GetTestDataFilePath(filename); |
212 DemuxResult demux_result = FFmpegDemuxForTest(file_path, | 223 DemuxResult demux_result = FFmpegDemuxForTest(file_path, |
213 /*audio*/ false); | 224 /*audio*/ false); |
214 frames = demux_result.frames; | 225 video_buffers_ = demux_result.frames; |
215 video_config = DecoderConfigAdapter::ToCastVideoConfig( | 226 video_config = DecoderConfigAdapter::ToCastVideoConfig( |
216 kPrimary, demux_result.video_config); | 227 kPrimary, demux_result.video_config); |
217 } | 228 } |
218 | 229 |
219 VideoPipelineDevice* video_pipeline_device = backend_->GetVideo(); | 230 video_decoder_ = backend_->CreateVideoDecoder(); |
220 | 231 video_feeding_completed_ = false; |
221 // Set configuration. | 232 bool success = video_decoder_->SetConfig(video_config); |
222 bool success = video_pipeline_device->SetConfig(video_config); | |
223 ASSERT_TRUE(success); | 233 ASSERT_TRUE(success); |
224 | 234 |
225 VLOG(2) << "Got " << frames.size() << " video input frames"; | 235 VLOG(2) << "Got " << video_buffers_.size() << " video input frames"; |
226 | 236 |
227 frames.push_back( | 237 video_buffers_.push_back(scoped_refptr<DecoderBufferBase>( |
228 scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter( | 238 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer()))); |
229 ::media::DecoderBuffer::CreateEOSBuffer()))); | 239 } |
230 | 240 |
231 MediaComponentDeviceFeederForTest* device_feeder = | 241 void AudioVideoPipelineDeviceTest::FeedAudioBuffer() { |
232 new MediaComponentDeviceFeederForTest(video_pipeline_device, frames); | 242 // Possibly feed one frame |
233 device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos, | 243 DCHECK(!audio_buffers_.empty()); |
234 base::Unretained(this), | 244 if (audio_feeding_completed_) |
235 device_feeder)); | 245 return; |
236 component_device_feeders_.push_back(device_feeder); | 246 |
| 247 scoped_refptr<DecoderBufferBase> buffer = audio_buffers_.front(); |
| 248 if (backend_audio_buffer_) |
| 249 backend_audio_buffer_->set_buffer(buffer); |
| 250 else |
| 251 backend_audio_buffer_.reset(new CastDecoderBufferImpl(buffer)); |
| 252 |
| 253 MediaPipelineBackend::BufferStatus status = |
| 254 audio_decoder_->PushBuffer(nullptr, // decrypt_context |
| 255 backend_audio_buffer_.get()); |
| 256 EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); |
| 257 audio_buffers_.pop_front(); |
| 258 |
| 259 // Feeding is done, just wait for the end of stream callback. |
| 260 if (buffer->end_of_stream() || audio_buffers_.empty()) { |
| 261 if (audio_buffers_.empty() && !buffer->end_of_stream()) { |
| 262 LOG(WARNING) << "Stream emptied without feeding EOS frame"; |
| 263 } |
| 264 |
| 265 audio_feeding_completed_ = true; |
| 266 return; |
| 267 } |
| 268 |
| 269 if (status == MediaPipelineBackend::kBufferPending) |
| 270 return; |
| 271 |
| 272 OnPushBufferComplete(audio_decoder_, MediaPipelineBackend::kBufferSuccess); |
| 273 } |
| 274 |
| 275 void AudioVideoPipelineDeviceTest::FeedVideoBuffer() { |
| 276 // Possibly feed one frame |
| 277 DCHECK(!video_buffers_.empty()); |
| 278 if (video_feeding_completed_) |
| 279 return; |
| 280 |
| 281 scoped_refptr<DecoderBufferBase> buffer = video_buffers_.front(); |
| 282 if (backend_video_buffer_) |
| 283 backend_video_buffer_->set_buffer(buffer); |
| 284 else |
| 285 backend_video_buffer_.reset(new CastDecoderBufferImpl(buffer)); |
| 286 |
| 287 MediaPipelineBackend::BufferStatus status = |
| 288 video_decoder_->PushBuffer(nullptr, // decrypt_context |
| 289 backend_video_buffer_.get()); |
| 290 EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); |
| 291 video_buffers_.pop_front(); |
| 292 |
| 293 // Feeding is done, just wait for the end of stream callback. |
| 294 if (buffer->end_of_stream() || video_buffers_.empty()) { |
| 295 if (video_buffers_.empty() && !buffer->end_of_stream()) { |
| 296 LOG(WARNING) << "Stream emptied without feeding EOS frame"; |
| 297 } |
| 298 |
| 299 video_feeding_completed_ = true; |
| 300 return; |
| 301 } |
| 302 |
| 303 if (status == MediaPipelineBackend::kBufferPending) |
| 304 return; |
| 305 |
| 306 OnPushBufferComplete(video_decoder_, MediaPipelineBackend::kBufferSuccess); |
237 } | 307 } |
238 | 308 |
239 void AudioVideoPipelineDeviceTest::Start() { | 309 void AudioVideoPipelineDeviceTest::Start() { |
240 pause_time_ = base::TimeDelta(); | 310 pause_time_ = base::TimeDelta(); |
241 pause_pattern_idx_ = 0; | 311 pause_pattern_idx_ = 0; |
242 | 312 |
243 for (size_t i = 0; i < component_device_feeders_.size(); i++) { | 313 if (audio_decoder_) { |
244 base::ThreadTaskRunnerHandle::Get()->PostTask( | 314 base::ThreadTaskRunnerHandle::Get()->PostTask( |
245 FROM_HERE, base::Bind(&MediaComponentDeviceFeederForTest::Feed, | 315 FROM_HERE, |
246 base::Unretained(component_device_feeders_[i]))); | 316 base::Bind(&AudioVideoPipelineDeviceTest::FeedAudioBuffer, |
| 317 base::Unretained(this))); |
| 318 } |
| 319 if (video_decoder_) { |
| 320 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 321 FROM_HERE, |
| 322 base::Bind(&AudioVideoPipelineDeviceTest::FeedVideoBuffer, |
| 323 base::Unretained(this))); |
247 } | 324 } |
248 | 325 |
249 media_clock_device_->SetState(MediaClockDevice::kStateRunning); | 326 backend_->Start(0); |
250 | 327 |
251 base::ThreadTaskRunnerHandle::Get()->PostTask( | 328 base::ThreadTaskRunnerHandle::Get()->PostTask( |
252 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, | 329 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, |
253 base::Unretained(this))); | 330 base::Unretained(this))); |
254 } | 331 } |
255 | 332 |
| 333 void AudioVideoPipelineDeviceTest::OnEndOfStream( |
| 334 MediaPipelineBackend::Decoder* decoder) { |
| 335 bool success = backend_->Stop(); |
| 336 ASSERT_TRUE(success); |
| 337 |
| 338 if (decoder == audio_decoder_) |
| 339 audio_decoder_ = nullptr; |
| 340 else if (decoder == video_decoder_) |
| 341 video_decoder_ = nullptr; |
| 342 |
| 343 if (!audio_decoder_ && !video_decoder_) |
| 344 base::MessageLoop::current()->QuitWhenIdle(); |
| 345 } |
| 346 |
| 347 void AudioVideoPipelineDeviceTest::OnPushBufferComplete( |
| 348 MediaPipelineBackend::Decoder* decoder, |
| 349 MediaPipelineBackend::BufferStatus status) { |
| 350 EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); |
| 351 |
| 352 if (decoder == audio_decoder_) { |
| 353 if (audio_feeding_completed_) |
| 354 return; |
| 355 |
| 356 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 357 FROM_HERE, |
| 358 base::Bind(&AudioVideoPipelineDeviceTest::FeedAudioBuffer, |
| 359 base::Unretained(this))); |
| 360 } else if (decoder == video_decoder_) { |
| 361 if (video_feeding_completed_) |
| 362 return; |
| 363 |
| 364 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 365 FROM_HERE, |
| 366 base::Bind(&AudioVideoPipelineDeviceTest::FeedVideoBuffer, |
| 367 base::Unretained(this))); |
| 368 } |
| 369 } |
| 370 |
256 void AudioVideoPipelineDeviceTest::MonitorLoop() { | 371 void AudioVideoPipelineDeviceTest::MonitorLoop() { |
257 base::TimeDelta media_time = base::TimeDelta::FromMicroseconds( | 372 base::TimeDelta media_time = |
258 media_clock_device_->GetTimeMicroseconds()); | 373 base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); |
259 | 374 |
260 if (!pause_pattern_.empty() && | 375 if (!pause_pattern_.empty() && |
261 pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() && | 376 pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() && |
262 media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) { | 377 media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) { |
263 // Do Pause | 378 // Do Pause |
264 media_clock_device_->SetRate(0.0); | 379 backend_->Pause(); |
265 pause_time_ = base::TimeDelta::FromMicroseconds( | 380 pause_time_ = base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); |
266 media_clock_device_->GetTimeMicroseconds()); | |
267 | 381 |
268 VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " << | 382 VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " << |
269 pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms"; | 383 pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms"; |
270 | 384 |
271 // Wait for pause finish | 385 // Wait for pause finish |
272 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( | 386 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( |
273 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted, | 387 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted, |
274 base::Unretained(this)), | 388 base::Unretained(this)), |
275 pause_pattern_[pause_pattern_idx_].length); | 389 pause_pattern_[pause_pattern_idx_].length); |
276 return; | 390 return; |
277 } | 391 } |
278 | 392 |
279 // Check state again in a little while | 393 // Check state again in a little while |
280 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( | 394 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( |
281 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, | 395 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, |
282 base::Unretained(this)), | 396 base::Unretained(this)), |
283 kMonitorLoopDelay); | 397 kMonitorLoopDelay); |
284 } | 398 } |
285 | 399 |
286 void AudioVideoPipelineDeviceTest::OnPauseCompleted() { | 400 void AudioVideoPipelineDeviceTest::OnPauseCompleted() { |
287 // Make sure the media time didn't move during that time. | 401 // Make sure the media time didn't move during that time. |
288 base::TimeDelta media_time = base::TimeDelta::FromMicroseconds( | 402 base::TimeDelta media_time = |
289 media_clock_device_->GetTimeMicroseconds()); | 403 base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); |
290 | 404 |
291 // TODO(damienv): | 405 // TODO(damienv): |
292 // Should be: | 406 // Should be: |
293 // EXPECT_EQ(media_time, media_time_); | 407 // EXPECT_EQ(media_time, media_time_); |
294 // However, some backends, when rendering the first frame while in paused | 408 // However, some backends, when rendering the first frame while in paused |
295 // mode, move the time forward. | 409 // mode, move the time forward. |
296 // This behaviour is not intended. | 410 // This behaviour is not intended. |
297 EXPECT_GE(media_time, pause_time_); | 411 EXPECT_GE(media_time, pause_time_); |
298 EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50)); | 412 EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50)); |
299 | 413 |
300 pause_time_ = media_time; | 414 pause_time_ = media_time; |
301 pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size(); | 415 pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size(); |
302 | 416 |
303 VLOG(2) << "Pause complete, restarting media clock"; | 417 VLOG(2) << "Pause complete, restarting media clock"; |
304 | 418 |
305 // Resume playback and frame feeding. | 419 // Resume playback and frame feeding. |
306 media_clock_device_->SetRate(1.0); | 420 backend_->Resume(); |
307 | 421 |
308 MonitorLoop(); | 422 MonitorLoop(); |
309 } | 423 } |
310 | 424 |
311 void AudioVideoPipelineDeviceTest::OnEos( | |
312 MediaComponentDeviceFeederForTest* device_feeder) { | |
313 for (ComponentDeviceIterator it = component_device_feeders_.begin(); | |
314 it != component_device_feeders_.end(); | |
315 ++it) { | |
316 if (*it == device_feeder) { | |
317 component_device_feeders_.erase(it); | |
318 break; | |
319 } | |
320 } | |
321 | |
322 // Check if all streams finished | |
323 if (component_device_feeders_.empty()) | |
324 base::MessageLoop::current()->QuitWhenIdle(); | |
325 } | |
326 | |
327 void AudioVideoPipelineDeviceTest::Initialize() { | 425 void AudioVideoPipelineDeviceTest::Initialize() { |
328 // Create the media device. | 426 // Create the media device. |
329 task_runner_.reset(new TaskRunnerImpl()); | 427 task_runner_.reset(new TaskRunnerImpl()); |
330 MediaPipelineDeviceParams params(task_runner_.get()); | 428 MediaPipelineDeviceParams params(task_runner_.get()); |
331 backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params)); | 429 backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params)); |
332 media_clock_device_ = backend_->GetClock(); | |
333 | |
334 // Clock initialization and configuration. | |
335 bool success = | |
336 media_clock_device_->SetState(MediaClockDevice::kStateIdle); | |
337 ASSERT_TRUE(success); | |
338 success = media_clock_device_->ResetTimeline(0); | |
339 ASSERT_TRUE(success); | |
340 media_clock_device_->SetRate(1.0); | |
341 } | 430 } |
342 | 431 |
343 TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) { | 432 TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) { |
344 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); | 433 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); |
345 | 434 |
346 ConfigureForAudioOnly("sfx.mp3"); | 435 ConfigureForAudioOnly("sfx.mp3"); |
347 Start(); | 436 Start(); |
348 message_loop->Run(); | 437 message_loop->Run(); |
349 } | 438 } |
350 | 439 |
(...skipping 36 matching lines...) |
387 TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) { | 476 TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) { |
388 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); | 477 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); |
389 | 478 |
390 ConfigureForFile("bear-640x360.webm"); | 479 ConfigureForFile("bear-640x360.webm"); |
391 Start(); | 480 Start(); |
392 message_loop->Run(); | 481 message_loop->Run(); |
393 } | 482 } |
394 | 483 |
395 } // namespace media | 484 } // namespace media |
396 } // namespace chromecast | 485 } // namespace chromecast |