Index: chromecast/media/service/cast_renderer.cc
diff --git a/chromecast/media/service/cast_renderer.cc b/chromecast/media/service/cast_renderer.cc
index 42964baf9931ca51a38fc5d8e885dda2567a16da..0ae827aa310915caadf14a6f9bbdf1dde1df145d 100644
--- a/chromecast/media/service/cast_renderer.cc
+++ b/chromecast/media/service/cast_renderer.cc
@@ -91,9 +91,23 @@ void CastRenderer::Initialize(::media::MediaResource* media_resource,
   pipeline_->SetClient(pipeline_client);
   pipeline_->Initialize(load_type, std::move(backend));
 
+  // TODO(servolk): Implement support for multiple streams. For now use the
+  // first enabled audio and video streams to preserve the existing behavior.
+  std::vector<::media::DemuxerStream*> streams = media_resource->GetStreams();
+  ::media::DemuxerStream* audio_stream = nullptr;
+  ::media::DemuxerStream* video_stream = nullptr;
+  for (const auto& stream : streams) {
+    if (!audio_stream && stream->enabled() &&
+        stream->type() == ::media::DemuxerStream::AUDIO) {
+      audio_stream = stream;
+    }
+    if (!video_stream && stream->enabled() &&
+        stream->type() == ::media::DemuxerStream::VIDEO) {
+      video_stream = stream;
+    }
+  }
+
   // Initialize audio.
-  ::media::DemuxerStream* audio_stream =
-      media_resource->GetStream(::media::DemuxerStream::AUDIO);
   if (audio_stream) {
     AvPipelineClient audio_client;
     audio_client.wait_for_key_cb = base::Bind(
@@ -118,8 +132,6 @@ void CastRenderer::Initialize(::media::MediaResource* media_resource,
   }
   // Initialize video.
-  ::media::DemuxerStream* video_stream =
-      media_resource->GetStream(::media::DemuxerStream::VIDEO);
   if (video_stream) {
     VideoPipelineClient video_client;
     video_client.av_pipeline_client.wait_for_key_cb = base::Bind(
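
Reviewer note (illustration only): the stream-selection loop added above,
restated as a standalone sketch. StreamType, StreamInfo, and
SelectFirstEnabled() are hypothetical stand-ins invented for this example;
they are not the ::media::DemuxerStream / ::media::MediaResource interfaces
the patch actually uses.

#include <vector>

enum class StreamType { kAudio, kVideo };

struct StreamInfo {
  StreamType type = StreamType::kAudio;
  bool enabled = false;
};

struct Selection {
  const StreamInfo* audio = nullptr;
  const StreamInfo* video = nullptr;
};

// Mirrors the loop in CastRenderer::Initialize(): take the first enabled
// audio stream and the first enabled video stream, ignore everything else.
Selection SelectFirstEnabled(const std::vector<StreamInfo>& streams) {
  Selection result;
  for (const StreamInfo& stream : streams) {
    if (!stream.enabled)
      continue;
    if (!result.audio && stream.type == StreamType::kAudio)
      result.audio = &stream;
    if (!result.video && stream.type == StreamType::kVideo)
      result.video = &stream;
  }
  return result;
}

Given {disabled audio, enabled audio, enabled video, enabled video}, this picks
the second and third entries, matching the "first enabled" behavior described
in the TODO comment.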