| OLD | NEW |
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <vector> | 5 #include <vector> |
| 6 | 6 |
| 7 #include "base/basictypes.h" | 7 #include "base/basictypes.h" |
| 8 #include "base/bind.h" | 8 #include "base/bind.h" |
| 9 #include "base/command_line.h" | 9 #include "base/command_line.h" |
| 10 #include "base/files/file_path.h" | 10 #include "base/files/file_path.h" |
| 11 #include "base/files/memory_mapped_file.h" | 11 #include "base/files/memory_mapped_file.h" |
| 12 #include "base/logging.h" | 12 #include "base/logging.h" |
| 13 #include "base/memory/ref_counted.h" | 13 #include "base/memory/ref_counted.h" |
| 14 #include "base/memory/scoped_ptr.h" | 14 #include "base/memory/scoped_ptr.h" |
| 15 #include "base/memory/scoped_vector.h" | 15 #include "base/memory/scoped_vector.h" |
| 16 #include "base/message_loop/message_loop.h" | 16 #include "base/message_loop/message_loop.h" |
| 17 #include "base/path_service.h" | 17 #include "base/path_service.h" |
| 18 #include "base/single_thread_task_runner.h" | 18 #include "base/single_thread_task_runner.h" |
| 19 #include "base/thread_task_runner_handle.h" | 19 #include "base/thread_task_runner_handle.h" |
| 20 #include "base/threading/thread.h" | 20 #include "base/threading/thread.h" |
| 21 #include "base/time/time.h" | 21 #include "base/time/time.h" |
| 22 #include "chromecast/base/task_runner_impl.h" | 22 #include "chromecast/base/task_runner_impl.h" |
| 23 #include "chromecast/media/cma/base/cast_decoder_buffer_impl.h" |
| 23 #include "chromecast/media/cma/base/decoder_buffer_adapter.h" | 24 #include "chromecast/media/cma/base/decoder_buffer_adapter.h" |
| 24 #include "chromecast/media/cma/base/decoder_config_adapter.h" | 25 #include "chromecast/media/cma/base/decoder_config_adapter.h" |
| 25 #include "chromecast/media/cma/test/frame_segmenter_for_test.h" | 26 #include "chromecast/media/cma/test/frame_segmenter_for_test.h" |
| 26 #include "chromecast/media/cma/test/media_component_device_feeder_for_test.h" | |
| 27 #include "chromecast/public/cast_media_shlib.h" | 27 #include "chromecast/public/cast_media_shlib.h" |
| 28 #include "chromecast/public/media/audio_pipeline_device.h" | |
| 29 #include "chromecast/public/media/cast_decoder_buffer.h" | 28 #include "chromecast/public/media/cast_decoder_buffer.h" |
| 30 #include "chromecast/public/media/decoder_config.h" | 29 #include "chromecast/public/media/decoder_config.h" |
| 31 #include "chromecast/public/media/media_clock_device.h" | |
| 32 #include "chromecast/public/media/media_pipeline_backend.h" | 30 #include "chromecast/public/media/media_pipeline_backend.h" |
| 33 #include "chromecast/public/media/media_pipeline_device_params.h" | 31 #include "chromecast/public/media/media_pipeline_device_params.h" |
| 34 #include "chromecast/public/media/video_pipeline_device.h" | |
| 35 #include "media/base/audio_decoder_config.h" | 32 #include "media/base/audio_decoder_config.h" |
| 36 #include "media/base/decoder_buffer.h" | 33 #include "media/base/decoder_buffer.h" |
| 37 #include "media/base/video_decoder_config.h" | 34 #include "media/base/video_decoder_config.h" |
| 38 #include "testing/gtest/include/gtest/gtest.h" | 35 #include "testing/gtest/include/gtest/gtest.h" |
| 39 | 36 |
| 40 namespace chromecast { | 37 namespace chromecast { |
| 41 namespace media { | 38 namespace media { |
| 42 | 39 |
| 43 namespace { | 40 namespace { |
| 44 | 41 |
| 45 typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator | |
| 46 ComponentDeviceIterator; | |
| 47 | |
| 48 const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20); | 42 const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20); |
| 49 | 43 |
| 50 base::FilePath GetTestDataFilePath(const std::string& name) { | 44 base::FilePath GetTestDataFilePath(const std::string& name) { |
| 51 base::FilePath file_path; | 45 base::FilePath file_path; |
| 52 CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path)); | 46 CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path)); |
| 53 | 47 |
| 54 file_path = file_path.Append(FILE_PATH_LITERAL("media")) | 48 file_path = file_path.Append(FILE_PATH_LITERAL("media")) |
| 55 .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data")) | 49 .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data")) |
| 56 .AppendASCII(name); | 50 .AppendASCII(name); |
| 57 return file_path; | 51 return file_path; |
| 58 } | 52 } |
| 59 | 53 |
| 60 } // namespace | 54 } // namespace |
| 61 | 55 |
| 62 class AudioVideoPipelineDeviceTest : public testing::Test { | 56 class AudioVideoPipelineDeviceTest : public testing::Test, |
| 57 public MediaPipelineBackend::Delegate { |
| 63 public: | 58 public: |
| 64 struct PauseInfo { | 59 struct PauseInfo { |
| 65 PauseInfo() {} | 60 PauseInfo() {} |
| 66 PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {} | 61 PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {} |
| 67 ~PauseInfo() {} | 62 ~PauseInfo() {} |
| 68 | 63 |
| 69 base::TimeDelta delay; | 64 base::TimeDelta delay; |
| 70 base::TimeDelta length; | 65 base::TimeDelta length; |
| 71 }; | 66 }; |
| 72 | 67 |
| (...skipping 16 matching lines...) |
| 89 // Pattern loops, waiting >= pattern[i].delay against media clock between | 84 // Pattern loops, waiting >= pattern[i].delay against media clock between |
| 90 // pauses, then pausing for >= pattern[i].length against MessageLoop | 85 // pauses, then pausing for >= pattern[i].length against MessageLoop |
| 91 // A pause with delay <0 signals to stop sequence and do not loop | 86 // A pause with delay <0 signals to stop sequence and do not loop |
| 92 void SetPausePattern(const std::vector<PauseInfo> pattern); | 87 void SetPausePattern(const std::vector<PauseInfo> pattern); |
| 93 | 88 |
| 94 // Adds a pause to the end of pause pattern | 89 // Adds a pause to the end of pause pattern |
| 95 void AddPause(base::TimeDelta delay, base::TimeDelta length); | 90 void AddPause(base::TimeDelta delay, base::TimeDelta length); |
| 96 | 91 |
| 97 void Start(); | 92 void Start(); |
| 98 | 93 |
| 94 // MediaPipelineBackend::Delegate implementation: |
| 95 void OnVideoResolutionChanged(MediaPipelineBackend::VideoDecoder* decoder, |
| 96 const Size& size) override {} |
| 97 void OnPushBufferComplete(MediaPipelineBackend::Decoder* decoder, |
| 98 MediaPipelineBackend::BufferStatus status) override; |
| 99 void OnEndOfStream(MediaPipelineBackend::Decoder* decoder) override; |
| 100 void OnDecoderError(MediaPipelineBackend::Decoder* decoder) override; |
| 101 |
| 99 private: | 102 private: |
| 100 void Initialize(); | 103 void Initialize(); |
| 101 | 104 |
| 102 void LoadAudioStream(const std::string& filename); | 105 void LoadAudioStream(const std::string& filename); |
| 103 void LoadVideoStream(const std::string& filename, bool raw_h264); | 106 void LoadVideoStream(const std::string& filename, bool raw_h264); |
| 104 | 107 |
| 108 void FeedAudioBuffer(); |
| 109 void FeedVideoBuffer(); |
| 110 |
| 105 void MonitorLoop(); | 111 void MonitorLoop(); |
| 106 | 112 |
| 107 void OnPauseCompleted(); | 113 void OnPauseCompleted(); |
| 108 | 114 |
| 109 void OnEos(MediaComponentDeviceFeederForTest* device_feeder); | |
| 110 | |
| 111 scoped_ptr<TaskRunnerImpl> task_runner_; | 115 scoped_ptr<TaskRunnerImpl> task_runner_; |
| 112 scoped_ptr<MediaPipelineBackend> backend_; | 116 scoped_ptr<MediaPipelineBackend> backend_; |
| 113 MediaClockDevice* media_clock_device_; | 117 scoped_ptr<CastDecoderBufferImpl> backend_audio_buffer_; |
| 114 | 118 scoped_ptr<CastDecoderBufferImpl> backend_video_buffer_; |
| 115 // Devices to feed | |
| 116 ScopedVector<MediaComponentDeviceFeederForTest> | |
| 117 component_device_feeders_; | |
| 118 | 119 |
| 119 // Current media time. | 120 // Current media time. |
| 120 base::TimeDelta pause_time_; | 121 base::TimeDelta pause_time_; |
| 121 | 122 |
| 122 // Pause settings | 123 // Pause settings |
| 123 std::vector<PauseInfo> pause_pattern_; | 124 std::vector<PauseInfo> pause_pattern_; |
| 124 int pause_pattern_idx_; | 125 int pause_pattern_idx_; |
| 125 | 126 |
| 127 BufferList audio_buffers_; |
| 128 BufferList video_buffers_; |
| 129 |
| 130 MediaPipelineBackend::AudioDecoder* audio_decoder_; |
| 131 MediaPipelineBackend::VideoDecoder* video_decoder_; |
| 132 bool audio_feeding_completed_; |
| 133 bool video_feeding_completed_; |
| 134 |
| 126 DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest); | 135 DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest); |
| 127 }; | 136 }; |
| 128 | 137 |
| 129 AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest() | 138 AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest() |
| 130 : pause_pattern_() { | 139 : pause_pattern_(), |
| 140 audio_decoder_(nullptr), |
| 141 video_decoder_(nullptr), |
| 142 audio_feeding_completed_(true), |
| 143 video_feeding_completed_(true) { |
| 131 } | 144 } |
| 132 | 145 |
| 133 AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() { | 146 AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() { |
| 134 } | 147 } |
| 135 | 148 |
| 136 void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay, | 149 void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay, |
| 137 base::TimeDelta length) { | 150 base::TimeDelta length) { |
| 138 pause_pattern_.push_back(PauseInfo(delay, length)); | 151 pause_pattern_.push_back(PauseInfo(delay, length)); |
| 139 } | 152 } |
| 140 | 153 |
| 141 void AudioVideoPipelineDeviceTest::SetPausePattern( | 154 void AudioVideoPipelineDeviceTest::SetPausePattern( |
| 142 const std::vector<PauseInfo> pattern) { | 155 const std::vector<PauseInfo> pattern) { |
| 143 pause_pattern_ = pattern; | 156 pause_pattern_ = pattern; |
| 144 } | 157 } |
| 145 | 158 |
| 146 void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly( | 159 void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly( |
| 147 const std::string& filename) { | 160 const std::string& filename) { |
| 148 Initialize(); | 161 Initialize(); |
| 149 LoadAudioStream(filename); | 162 LoadAudioStream(filename); |
| 163 bool success = backend_->Initialize(this); |
| 164 ASSERT_TRUE(success); |
| 150 } | 165 } |
| 151 | 166 |
| 152 void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly( | 167 void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly( |
| 153 const std::string& filename, | 168 const std::string& filename, |
| 154 bool raw_h264) { | 169 bool raw_h264) { |
| 155 Initialize(); | 170 Initialize(); |
| 156 LoadVideoStream(filename, raw_h264); | 171 LoadVideoStream(filename, raw_h264); |
| 172 bool success = backend_->Initialize(this); |
| 173 ASSERT_TRUE(success); |
| 157 } | 174 } |
| 158 | 175 |
| 159 void AudioVideoPipelineDeviceTest::ConfigureForFile( | 176 void AudioVideoPipelineDeviceTest::ConfigureForFile( |
| 160 const std::string& filename) { | 177 const std::string& filename) { |
| 161 Initialize(); | 178 Initialize(); |
| 162 LoadVideoStream(filename, false /* raw_h264 */); | 179 LoadVideoStream(filename, false /* raw_h264 */); |
| 163 LoadAudioStream(filename); | 180 LoadAudioStream(filename); |
| 181 bool success = backend_->Initialize(this); |
| 182 ASSERT_TRUE(success); |
| 164 } | 183 } |
| 165 | 184 |
| 166 void AudioVideoPipelineDeviceTest::LoadAudioStream( | 185 void AudioVideoPipelineDeviceTest::LoadAudioStream( |
| 167 const std::string& filename) { | 186 const std::string& filename) { |
| 168 base::FilePath file_path = GetTestDataFilePath(filename); | 187 base::FilePath file_path = GetTestDataFilePath(filename); |
| 169 DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */); | 188 DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */); |
| 170 BufferList frames = demux_result.frames; | |
| 171 | 189 |
| 172 AudioPipelineDevice* audio_pipeline_device = backend_->GetAudio(); | 190 audio_buffers_ = demux_result.frames; |
| 191 audio_decoder_ = backend_->CreateAudioDecoder(); |
| 192 audio_feeding_completed_ = false; |
| 173 | 193 |
| 174 bool success = audio_pipeline_device->SetConfig( | 194 bool success = |
| 175 DecoderConfigAdapter::ToCastAudioConfig(kPrimary, | 195 audio_decoder_->SetConfig(DecoderConfigAdapter::ToCastAudioConfig( |
| 176 demux_result.audio_config)); | 196 kPrimary, demux_result.audio_config)); |
| 177 ASSERT_TRUE(success); | 197 ASSERT_TRUE(success); |
| 178 | 198 |
| 179 VLOG(2) << "Got " << frames.size() << " audio input frames"; | 199 VLOG(2) << "Got " << audio_buffers_.size() << " audio input frames"; |
| 180 | 200 |
| 181 frames.push_back( | 201 audio_buffers_.push_back(scoped_refptr<DecoderBufferBase>( |
| 182 scoped_refptr<DecoderBufferBase>( | 202 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer()))); |
| 183 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer()))); | |
| 184 | |
| 185 MediaComponentDeviceFeederForTest* device_feeder = | |
| 186 new MediaComponentDeviceFeederForTest(audio_pipeline_device, frames); | |
| 187 device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos, | |
| 188 base::Unretained(this), | |
| 189 device_feeder)); | |
| 190 component_device_feeders_.push_back(device_feeder); | |
| 191 } | 203 } |
| 192 | 204 |
| 193 void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename, | 205 void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename, |
| 194 bool raw_h264) { | 206 bool raw_h264) { |
| 195 BufferList frames; | |
| 196 VideoConfig video_config; | 207 VideoConfig video_config; |
| 197 | 208 |
| 198 if (raw_h264) { | 209 if (raw_h264) { |
| 199 base::FilePath file_path = GetTestDataFilePath(filename); | 210 base::FilePath file_path = GetTestDataFilePath(filename); |
| 200 base::MemoryMappedFile video_stream; | 211 base::MemoryMappedFile video_stream; |
| 201 ASSERT_TRUE(video_stream.Initialize(file_path)) | 212 ASSERT_TRUE(video_stream.Initialize(file_path)) |
| 202 << "Couldn't open stream file: " << file_path.MaybeAsASCII(); | 213 << "Couldn't open stream file: " << file_path.MaybeAsASCII(); |
| 203 frames = H264SegmenterForTest(video_stream.data(), video_stream.length()); | 214 video_buffers_ = |
| 215 H264SegmenterForTest(video_stream.data(), video_stream.length()); |
| 204 | 216 |
| 205 // TODO(erickung): Either pull data from stream or make caller specify value | 217 // TODO(erickung): Either pull data from stream or make caller specify value |
| 206 video_config.codec = kCodecH264; | 218 video_config.codec = kCodecH264; |
| 207 video_config.profile = kH264Main; | 219 video_config.profile = kH264Main; |
| 208 video_config.additional_config = NULL; | 220 video_config.additional_config = NULL; |
| 209 video_config.is_encrypted = false; | 221 video_config.is_encrypted = false; |
| 210 } else { | 222 } else { |
| 211 base::FilePath file_path = GetTestDataFilePath(filename); | 223 base::FilePath file_path = GetTestDataFilePath(filename); |
| 212 DemuxResult demux_result = FFmpegDemuxForTest(file_path, | 224 DemuxResult demux_result = FFmpegDemuxForTest(file_path, |
| 213 /*audio*/ false); | 225 /*audio*/ false); |
| 214 frames = demux_result.frames; | 226 video_buffers_ = demux_result.frames; |
| 215 video_config = DecoderConfigAdapter::ToCastVideoConfig( | 227 video_config = DecoderConfigAdapter::ToCastVideoConfig( |
| 216 kPrimary, demux_result.video_config); | 228 kPrimary, demux_result.video_config); |
| 217 } | 229 } |
| 218 | 230 |
| 219 VideoPipelineDevice* video_pipeline_device = backend_->GetVideo(); | 231 video_decoder_ = backend_->CreateVideoDecoder(); |
| 220 | 232 video_feeding_completed_ = false; |
| 221 // Set configuration. | 233 bool success = video_decoder_->SetConfig(video_config); |
| 222 bool success = video_pipeline_device->SetConfig(video_config); | |
| 223 ASSERT_TRUE(success); | 234 ASSERT_TRUE(success); |
| 224 | 235 |
| 225 VLOG(2) << "Got " << frames.size() << " video input frames"; | 236 VLOG(2) << "Got " << video_buffers_.size() << " video input frames"; |
| 226 | 237 |
| 227 frames.push_back( | 238 video_buffers_.push_back(scoped_refptr<DecoderBufferBase>( |
| 228 scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter( | 239 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer()))); |
| 229 ::media::DecoderBuffer::CreateEOSBuffer()))); | 240 } |
| 230 | 241 |
| 231 MediaComponentDeviceFeederForTest* device_feeder = | 242 void AudioVideoPipelineDeviceTest::FeedAudioBuffer() { |
| 232 new MediaComponentDeviceFeederForTest(video_pipeline_device, frames); | 243 // Possibly feed one frame |
| 233 device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos, | 244 DCHECK(!audio_buffers_.empty()); |
| 234 base::Unretained(this), | 245 if (audio_feeding_completed_) |
| 235 device_feeder)); | 246 return; |
| 236 component_device_feeders_.push_back(device_feeder); | 247 |
| 248 scoped_refptr<DecoderBufferBase> buffer = audio_buffers_.front(); |
| 249 if (backend_audio_buffer_) |
| 250 backend_audio_buffer_->set_buffer(buffer); |
| 251 else |
| 252 backend_audio_buffer_.reset(new CastDecoderBufferImpl(buffer)); |
| 253 |
| 254 MediaPipelineBackend::BufferStatus status = |
| 255 audio_decoder_->PushBuffer(nullptr, // decrypt_context |
| 256 backend_audio_buffer_.get()); |
| 257 EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); |
| 258 audio_buffers_.pop_front(); |
| 259 |
| 260 // Feeding is done, just wait for the end of stream callback. |
| 261 if (buffer->end_of_stream() || audio_buffers_.empty()) { |
| 262 if (audio_buffers_.empty() && !buffer->end_of_stream()) { |
| 263 LOG(WARNING) << "Stream emptied without feeding EOS frame"; |
| 264 } |
| 265 |
| 266 audio_feeding_completed_ = true; |
| 267 return; |
| 268 } |
| 269 |
| 270 if (status == MediaPipelineBackend::kBufferPending) |
| 271 return; |
| 272 |
| 273 OnPushBufferComplete(audio_decoder_, MediaPipelineBackend::kBufferSuccess); |
| 274 } |
| 275 |
| 276 void AudioVideoPipelineDeviceTest::FeedVideoBuffer() { |
| 277 // Possibly feed one frame |
| 278 DCHECK(!video_buffers_.empty()); |
| 279 if (video_feeding_completed_) |
| 280 return; |
| 281 |
| 282 scoped_refptr<DecoderBufferBase> buffer = video_buffers_.front(); |
| 283 if (backend_video_buffer_) |
| 284 backend_video_buffer_->set_buffer(buffer); |
| 285 else |
| 286 backend_video_buffer_.reset(new CastDecoderBufferImpl(buffer)); |
| 287 |
| 288 MediaPipelineBackend::BufferStatus status = |
| 289 video_decoder_->PushBuffer(nullptr, // decrypt_context |
| 290 backend_video_buffer_.get()); |
| 291 EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); |
| 292 video_buffers_.pop_front(); |
| 293 |
| 294 // Feeding is done, just wait for the end of stream callback. |
| 295 if (buffer->end_of_stream() || video_buffers_.empty()) { |
| 296 if (video_buffers_.empty() && !buffer->end_of_stream()) { |
| 297 LOG(WARNING) << "Stream emptied without feeding EOS frame"; |
| 298 } |
| 299 |
| 300 video_feeding_completed_ = true; |
| 301 return; |
| 302 } |
| 303 |
| 304 if (status == MediaPipelineBackend::kBufferPending) |
| 305 return; |
| 306 |
| 307 OnPushBufferComplete(video_decoder_, MediaPipelineBackend::kBufferSuccess); |
| 237 } | 308 } |
| 238 | 309 |
| 239 void AudioVideoPipelineDeviceTest::Start() { | 310 void AudioVideoPipelineDeviceTest::Start() { |
| 240 pause_time_ = base::TimeDelta(); | 311 pause_time_ = base::TimeDelta(); |
| 241 pause_pattern_idx_ = 0; | 312 pause_pattern_idx_ = 0; |
| 242 | 313 |
| 243 for (size_t i = 0; i < component_device_feeders_.size(); i++) { | 314 if (audio_decoder_) { |
| 244 base::ThreadTaskRunnerHandle::Get()->PostTask( | 315 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 245 FROM_HERE, base::Bind(&MediaComponentDeviceFeederForTest::Feed, | 316 FROM_HERE, |
| 246 base::Unretained(component_device_feeders_[i]))); | 317 base::Bind(&AudioVideoPipelineDeviceTest::FeedAudioBuffer, |
| 318 base::Unretained(this))); |
| 319 } |
| 320 if (video_decoder_) { |
| 321 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 322 FROM_HERE, |
| 323 base::Bind(&AudioVideoPipelineDeviceTest::FeedVideoBuffer, |
| 324 base::Unretained(this))); |
| 247 } | 325 } |
| 248 | 326 |
| 249 media_clock_device_->SetState(MediaClockDevice::kStateRunning); | 327 backend_->Start(0); |
| 250 | 328 |
| 251 base::ThreadTaskRunnerHandle::Get()->PostTask( | 329 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 252 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, | 330 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, |
| 253 base::Unretained(this))); | 331 base::Unretained(this))); |
| 254 } | 332 } |
| 255 | 333 |
| 334 void AudioVideoPipelineDeviceTest::OnEndOfStream( |
| 335 MediaPipelineBackend::Decoder* decoder) { |
| 336 bool success = backend_->Stop(); |
| 337 ASSERT_TRUE(success); |
| 338 |
| 339 if (decoder == audio_decoder_) |
| 340 audio_decoder_ = nullptr; |
| 341 else if (decoder == video_decoder_) |
| 342 video_decoder_ = nullptr; |
| 343 |
| 344 if (!audio_decoder_ && !video_decoder_) |
| 345 base::MessageLoop::current()->QuitWhenIdle(); |
| 346 } |
| 347 |
| 348 void AudioVideoPipelineDeviceTest::OnDecoderError( |
| 349 MediaPipelineBackend::Decoder* decoder) { |
| 350 ASSERT_TRUE(false); |
| 351 } |
| 352 |
| 353 void AudioVideoPipelineDeviceTest::OnPushBufferComplete( |
| 354 MediaPipelineBackend::Decoder* decoder, |
| 355 MediaPipelineBackend::BufferStatus status) { |
| 356 EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); |
| 357 |
| 358 if (decoder == audio_decoder_) { |
| 359 if (audio_feeding_completed_) |
| 360 return; |
| 361 |
| 362 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 363 FROM_HERE, |
| 364 base::Bind(&AudioVideoPipelineDeviceTest::FeedAudioBuffer, |
| 365 base::Unretained(this))); |
| 366 } else if (decoder == video_decoder_) { |
| 367 if (video_feeding_completed_) |
| 368 return; |
| 369 |
| 370 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 371 FROM_HERE, |
| 372 base::Bind(&AudioVideoPipelineDeviceTest::FeedVideoBuffer, |
| 373 base::Unretained(this))); |
| 374 } |
| 375 } |
| 376 |
| 256 void AudioVideoPipelineDeviceTest::MonitorLoop() { | 377 void AudioVideoPipelineDeviceTest::MonitorLoop() { |
| 257 base::TimeDelta media_time = base::TimeDelta::FromMicroseconds( | 378 base::TimeDelta media_time = |
| 258 media_clock_device_->GetTimeMicroseconds()); | 379 base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); |
| 259 | 380 |
| 260 if (!pause_pattern_.empty() && | 381 if (!pause_pattern_.empty() && |
| 261 pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() && | 382 pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() && |
| 262 media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) { | 383 media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) { |
| 263 // Do Pause | 384 // Do Pause |
| 264 media_clock_device_->SetRate(0.0); | 385 backend_->Pause(); |
| 265 pause_time_ = base::TimeDelta::FromMicroseconds( | 386 pause_time_ = base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); |
| 266 media_clock_device_->GetTimeMicroseconds()); | |
| 267 | 387 |
| 268 VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " << | 388 VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " << |
| 269 pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms"; | 389 pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms"; |
| 270 | 390 |
| 271 // Wait for pause finish | 391 // Wait for pause finish |
| 272 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( | 392 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( |
| 273 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted, | 393 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted, |
| 274 base::Unretained(this)), | 394 base::Unretained(this)), |
| 275 pause_pattern_[pause_pattern_idx_].length); | 395 pause_pattern_[pause_pattern_idx_].length); |
| 276 return; | 396 return; |
| 277 } | 397 } |
| 278 | 398 |
| 279 // Check state again in a little while | 399 // Check state again in a little while |
| 280 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( | 400 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( |
| 281 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, | 401 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, |
| 282 base::Unretained(this)), | 402 base::Unretained(this)), |
| 283 kMonitorLoopDelay); | 403 kMonitorLoopDelay); |
| 284 } | 404 } |
| 285 | 405 |
| 286 void AudioVideoPipelineDeviceTest::OnPauseCompleted() { | 406 void AudioVideoPipelineDeviceTest::OnPauseCompleted() { |
| 287 // Make sure the media time didn't move during that time. | 407 // Make sure the media time didn't move during that time. |
| 288 base::TimeDelta media_time = base::TimeDelta::FromMicroseconds( | 408 base::TimeDelta media_time = |
| 289 media_clock_device_->GetTimeMicroseconds()); | 409 base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); |
| 290 | 410 |
| 291 // TODO(damienv): | 411 // TODO(damienv): |
| 292 // Should be: | 412 // Should be: |
| 293 // EXPECT_EQ(media_time, media_time_); | 413 // EXPECT_EQ(media_time, media_time_); |
| 294 // However, some backends, when rendering the first frame while in paused | 414 // However, some backends, when rendering the first frame while in paused |
| 295 // mode moves the time forward. | 415 // mode moves the time forward. |
| 296 // This behaviour is not intended. | 416 // This behaviour is not intended. |
| 297 EXPECT_GE(media_time, pause_time_); | 417 EXPECT_GE(media_time, pause_time_); |
| 298 EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50)); | 418 EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50)); |
| 299 | 419 |
| 300 pause_time_ = media_time; | 420 pause_time_ = media_time; |
| 301 pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size(); | 421 pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size(); |
| 302 | 422 |
| 303 VLOG(2) << "Pause complete, restarting media clock"; | 423 VLOG(2) << "Pause complete, restarting media clock"; |
| 304 | 424 |
| 305 // Resume playback and frame feeding. | 425 // Resume playback and frame feeding. |
| 306 media_clock_device_->SetRate(1.0); | 426 backend_->Resume(); |
| 307 | 427 |
| 308 MonitorLoop(); | 428 MonitorLoop(); |
| 309 } | 429 } |
| 310 | 430 |
| 311 void AudioVideoPipelineDeviceTest::OnEos( | |
| 312 MediaComponentDeviceFeederForTest* device_feeder) { | |
| 313 for (ComponentDeviceIterator it = component_device_feeders_.begin(); | |
| 314 it != component_device_feeders_.end(); | |
| 315 ++it) { | |
| 316 if (*it == device_feeder) { | |
| 317 component_device_feeders_.erase(it); | |
| 318 break; | |
| 319 } | |
| 320 } | |
| 321 | |
| 322 // Check if all streams finished | |
| 323 if (component_device_feeders_.empty()) | |
| 324 base::MessageLoop::current()->QuitWhenIdle(); | |
| 325 } | |
| 326 | |
| 327 void AudioVideoPipelineDeviceTest::Initialize() { | 431 void AudioVideoPipelineDeviceTest::Initialize() { |
| 328 // Create the media device. | 432 // Create the media device. |
| 329 task_runner_.reset(new TaskRunnerImpl()); | 433 task_runner_.reset(new TaskRunnerImpl()); |
| 330 MediaPipelineDeviceParams params(task_runner_.get()); | 434 MediaPipelineDeviceParams params(task_runner_.get()); |
| 331 backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params)); | 435 backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params)); |
| 332 media_clock_device_ = backend_->GetClock(); | |
| 333 | |
| 334 // Clock initialization and configuration. | |
| 335 bool success = | |
| 336 media_clock_device_->SetState(MediaClockDevice::kStateIdle); | |
| 337 ASSERT_TRUE(success); | |
| 338 success = media_clock_device_->ResetTimeline(0); | |
| 339 ASSERT_TRUE(success); | |
| 340 media_clock_device_->SetRate(1.0); | |
| 341 } | 436 } |
| 342 | 437 |
| 343 TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) { | 438 TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) { |
| 344 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); | 439 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); |
| 345 | 440 |
| 346 ConfigureForAudioOnly("sfx.mp3"); | 441 ConfigureForAudioOnly("sfx.mp3"); |
| 347 Start(); | 442 Start(); |
| 348 message_loop->Run(); | 443 message_loop->Run(); |
| 349 } | 444 } |
| 350 | 445 |
| (...skipping 36 matching lines...) |
| 387 TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) { | 482 TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) { |
| 388 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); | 483 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); |
| 389 | 484 |
| 390 ConfigureForFile("bear-640x360.webm"); | 485 ConfigureForFile("bear-640x360.webm"); |
| 391 Start(); | 486 Start(); |
| 392 message_loop->Run(); | 487 message_loop->Run(); |
| 393 } | 488 } |
| 394 | 489 |
| 395 } // namespace media | 490 } // namespace media |
| 396 } // namespace chromecast | 491 } // namespace chromecast |
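
The tests that presumably exercise the pause-pattern API sit in the elided rows above. For reference only, a minimal sketch of a test built from the fixture methods visible in this diff (`ConfigureForFile`, `AddPause`, `Start`) is shown below; the pause delay and length values are illustrative placeholders, not values taken from this CL.

```cpp
// Sketch only (not part of the CL): drives playback with a single looping
// pause, using the pause-pattern API declared in AudioVideoPipelineDeviceTest.
TEST_F(AudioVideoPipelineDeviceTest, WebmPlaybackWithPause) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForFile("bear-640x360.webm");
  // Wait >= 500ms of media time between pauses, then pause for >= 100ms of
  // wall-clock time; MonitorLoop() applies the pattern and loops it until EOS.
  AddPause(base::TimeDelta::FromMilliseconds(500),
           base::TimeDelta::FromMilliseconds(100));
  Start();
  message_loop->Run();
}
```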