| OLD | NEW |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/webmediaplayer_impl.h" | 5 #include "media/blink/webmediaplayer_impl.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 #include <limits> | 8 #include <limits> |
| 9 #include <string> | 9 #include <string> |
| 10 #include <vector> | 10 #include <vector> |
| 11 | 11 |
| 12 #include "base/bind.h" | 12 #include "base/bind.h" |
| 13 #include "base/callback.h" | 13 #include "base/callback.h" |
| 14 #include "base/callback_helpers.h" | 14 #include "base/callback_helpers.h" |
| 15 #include "base/debug/alias.h" | 15 #include "base/debug/alias.h" |
| 16 #include "base/debug/crash_logging.h" | 16 #include "base/debug/crash_logging.h" |
| 17 #include "base/debug/trace_event.h" | 17 #include "base/debug/trace_event.h" |
| 18 #include "base/message_loop/message_loop_proxy.h" | 18 #include "base/message_loop/message_loop_proxy.h" |
| 19 #include "base/metrics/histogram.h" | 19 #include "base/metrics/histogram.h" |
| 20 #include "base/single_thread_task_runner.h" | 20 #include "base/single_thread_task_runner.h" |
| 21 #include "base/synchronization/waitable_event.h" | 21 #include "base/synchronization/waitable_event.h" |
| 22 #include "cc/blink/web_layer_impl.h" | 22 #include "cc/blink/web_layer_impl.h" |
| 23 #include "cc/layers/video_layer.h" | 23 #include "cc/layers/video_layer.h" |
| 24 #include "content/renderer/media/buffered_data_source.h" | |
| 25 #include "content/renderer/media/crypto/encrypted_media_player_support.h" | |
| 26 #include "content/renderer/media/texttrack_impl.h" | |
| 27 #include "content/renderer/media/webaudiosourceprovider_impl.h" | |
| 28 #include "content/renderer/media/webinbandtexttrack_impl.h" | |
| 29 #include "content/renderer/media/webmediaplayer_delegate.h" | |
| 30 #include "content/renderer/media/webmediaplayer_params.h" | |
| 31 #include "content/renderer/media/webmediaplayer_util.h" | |
| 32 #include "content/renderer/media/webmediasource_impl.h" | |
| 33 #include "gpu/GLES2/gl2extchromium.h" | 24 #include "gpu/GLES2/gl2extchromium.h" |
| 34 #include "gpu/command_buffer/common/mailbox_holder.h" | 25 #include "gpu/command_buffer/common/mailbox_holder.h" |
| 35 #include "media/audio/null_audio_sink.h" | 26 #include "media/audio/null_audio_sink.h" |
| 36 #include "media/base/audio_hardware_config.h" | 27 #include "media/base/audio_hardware_config.h" |
| 37 #include "media/base/bind_to_current_loop.h" | 28 #include "media/base/bind_to_current_loop.h" |
| 38 #include "media/base/limits.h" | 29 #include "media/base/limits.h" |
| 39 #include "media/base/media_log.h" | 30 #include "media/base/media_log.h" |
| 40 #include "media/base/pipeline.h" | 31 #include "media/base/pipeline.h" |
| 41 #include "media/base/text_renderer.h" | 32 #include "media/base/text_renderer.h" |
| 42 #include "media/base/video_frame.h" | 33 #include "media/base/video_frame.h" |
| 34 #include "media/blink/buffered_data_source.h" |
| 35 #include "media/blink/encrypted_media_player_support.h" |
| 36 #include "media/blink/texttrack_impl.h" |
| 37 #include "media/blink/webaudiosourceprovider_impl.h" |
| 38 #include "media/blink/webinbandtexttrack_impl.h" |
| 39 #include "media/blink/webmediaplayer_delegate.h" |
| 40 #include "media/blink/webmediaplayer_params.h" |
| 41 #include "media/blink/webmediaplayer_util.h" |
| 42 #include "media/blink/webmediasource_impl.h" |
| 43 #include "media/filters/audio_renderer_impl.h" | 43 #include "media/filters/audio_renderer_impl.h" |
| 44 #include "media/filters/chunk_demuxer.h" | 44 #include "media/filters/chunk_demuxer.h" |
| 45 #include "media/filters/ffmpeg_audio_decoder.h" | 45 #include "media/filters/ffmpeg_audio_decoder.h" |
| 46 #include "media/filters/ffmpeg_demuxer.h" | 46 #include "media/filters/ffmpeg_demuxer.h" |
| 47 #include "media/filters/ffmpeg_video_decoder.h" | 47 #include "media/filters/ffmpeg_video_decoder.h" |
| 48 #include "media/filters/gpu_video_accelerator_factories.h" | 48 #include "media/filters/gpu_video_accelerator_factories.h" |
| 49 #include "media/filters/gpu_video_decoder.h" | 49 #include "media/filters/gpu_video_decoder.h" |
| 50 #include "media/filters/opus_audio_decoder.h" | 50 #include "media/filters/opus_audio_decoder.h" |
| 51 #include "media/filters/renderer_impl.h" | 51 #include "media/filters/renderer_impl.h" |
| 52 #include "media/filters/video_renderer_impl.h" | 52 #include "media/filters/video_renderer_impl.h" |
| 53 #include "media/filters/vpx_video_decoder.h" | 53 #include "media/filters/vpx_video_decoder.h" |
| 54 #include "third_party/WebKit/public/platform/WebMediaSource.h" | 54 #include "third_party/WebKit/public/platform/WebMediaSource.h" |
| 55 #include "third_party/WebKit/public/platform/WebRect.h" | 55 #include "third_party/WebKit/public/platform/WebRect.h" |
| 56 #include "third_party/WebKit/public/platform/WebSize.h" | 56 #include "third_party/WebKit/public/platform/WebSize.h" |
| 57 #include "third_party/WebKit/public/platform/WebString.h" | 57 #include "third_party/WebKit/public/platform/WebString.h" |
| 58 #include "third_party/WebKit/public/platform/WebURL.h" | 58 #include "third_party/WebKit/public/platform/WebURL.h" |
| 59 #include "third_party/WebKit/public/web/WebLocalFrame.h" | 59 #include "third_party/WebKit/public/web/WebLocalFrame.h" |
| 60 #include "third_party/WebKit/public/web/WebSecurityOrigin.h" | 60 #include "third_party/WebKit/public/web/WebSecurityOrigin.h" |
| 61 #include "third_party/WebKit/public/web/WebView.h" | 61 #include "third_party/WebKit/public/web/WebView.h" |
| 62 | 62 |
| 63 using blink::WebCanvas; | 63 using blink::WebCanvas; |
| 64 using blink::WebMediaPlayer; | 64 using blink::WebMediaPlayer; |
| 65 using blink::WebRect; | 65 using blink::WebRect; |
| 66 using blink::WebSize; | 66 using blink::WebSize; |
| 67 using blink::WebString; | 67 using blink::WebString; |
| 68 using media::PipelineStatus; | |
| 69 | 68 |
| 70 namespace { | 69 namespace { |
| 71 | 70 |
| 72 // Limits the range of playback rate. | 71 // Limits the range of playback rate. |
| 73 // | 72 // |
| 74 // TODO(kylep): Revisit these. | 73 // TODO(kylep): Revisit these. |
| 75 // | 74 // |
| 76 // Vista has substantially lower performance than XP or Windows7. If you speed | 75 // Vista has substantially lower performance than XP or Windows7. If you speed |
| 77 // up a video too much, it can't keep up, and rendering stops updating except on | 76 // up a video too much, it can't keep up, and rendering stops updating except on |
| 78 // the time bar. For really high speeds, audio becomes a bottleneck and we just | 77 // the time bar. For really high speeds, audio becomes a bottleneck and we just |
| (...skipping 21 matching lines...) Expand all Loading... |
| 100 virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE { | 99 virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE { |
| 101 web_graphics_context_->waitSyncPoint(sync_point); | 100 web_graphics_context_->waitSyncPoint(sync_point); |
| 102 } | 101 } |
| 103 | 102 |
| 104 private: | 103 private: |
| 105 blink::WebGraphicsContext3D* web_graphics_context_; | 104 blink::WebGraphicsContext3D* web_graphics_context_; |
| 106 }; | 105 }; |
| 107 | 106 |
| 108 } // namespace | 107 } // namespace |
| 109 | 108 |
| 110 namespace content { | 109 namespace media { |
| 111 | 110 |
| 112 class BufferedDataSourceHostImpl; | 111 class BufferedDataSourceHostImpl; |
| 113 | 112 |
| 114 #define COMPILE_ASSERT_MATCHING_ENUM(name) \ | 113 #define COMPILE_ASSERT_MATCHING_ENUM(name) \ |
| 115 COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::CORSMode ## name) == \ | 114 COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::CORSMode ## name) == \ |
| 116 static_cast<int>(BufferedResourceLoader::k ## name), \ | 115 static_cast<int>(BufferedResourceLoader::k ## name), \ |
| 117 mismatching_enums) | 116 mismatching_enums) |
| 118 COMPILE_ASSERT_MATCHING_ENUM(Unspecified); | 117 COMPILE_ASSERT_MATCHING_ENUM(Unspecified); |
| 119 COMPILE_ASSERT_MATCHING_ENUM(Anonymous); | 118 COMPILE_ASSERT_MATCHING_ENUM(Anonymous); |
| 120 COMPILE_ASSERT_MATCHING_ENUM(UseCredentials); | 119 COMPILE_ASSERT_MATCHING_ENUM(UseCredentials); |
| 121 #undef COMPILE_ASSERT_MATCHING_ENUM | 120 #undef COMPILE_ASSERT_MATCHING_ENUM |
| 122 | 121 |
| 123 #define BIND_TO_RENDER_LOOP(function) \ | 122 #define BIND_TO_RENDER_LOOP(function) \ |
| 124 (DCHECK(main_task_runner_->BelongsToCurrentThread()), \ | 123 (DCHECK(main_task_runner_->BelongsToCurrentThread()), \ |
| 125 media::BindToCurrentLoop(base::Bind(function, AsWeakPtr()))) | 124 BindToCurrentLoop(base::Bind(function, AsWeakPtr()))) |
| 126 | 125 |
| 127 #define BIND_TO_RENDER_LOOP1(function, arg1) \ | 126 #define BIND_TO_RENDER_LOOP1(function, arg1) \ |
| 128 (DCHECK(main_task_runner_->BelongsToCurrentThread()), \ | 127 (DCHECK(main_task_runner_->BelongsToCurrentThread()), \ |
| 129 media::BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1))) | 128 BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1))) |
| 130 | 129 |
| 131 static void LogMediaSourceError(const scoped_refptr<media::MediaLog>& media_log, | 130 static void LogMediaSourceError(const scoped_refptr<MediaLog>& media_log, |
| 132 const std::string& error) { | 131 const std::string& error) { |
| 133 media_log->AddEvent(media_log->CreateMediaSourceErrorEvent(error)); | 132 media_log->AddEvent(media_log->CreateMediaSourceErrorEvent(error)); |
| 134 } | 133 } |
| 135 | 134 |
| 136 WebMediaPlayerImpl::WebMediaPlayerImpl( | 135 WebMediaPlayerImpl::WebMediaPlayerImpl( |
| 137 blink::WebLocalFrame* frame, | 136 blink::WebLocalFrame* frame, |
| 138 blink::WebMediaPlayerClient* client, | 137 blink::WebMediaPlayerClient* client, |
| 139 base::WeakPtr<WebMediaPlayerDelegate> delegate, | 138 base::WeakPtr<WebMediaPlayerDelegate> delegate, |
| 140 const WebMediaPlayerParams& params) | 139 const WebMediaPlayerParams& params) |
| 141 : frame_(frame), | 140 : frame_(frame), |
| (...skipping 27 matching lines...) Expand all Loading... |
| 169 encrypted_media_support_( | 168 encrypted_media_support_( |
| 170 params.CreateEncryptedMediaPlayerSupport(client)), | 169 params.CreateEncryptedMediaPlayerSupport(client)), |
| 171 audio_hardware_config_(params.audio_hardware_config()) { | 170 audio_hardware_config_(params.audio_hardware_config()) { |
| 172 DCHECK(encrypted_media_support_); | 171 DCHECK(encrypted_media_support_); |
| 173 | 172 |
| 174 // Threaded compositing isn't enabled universally yet. | 173 // Threaded compositing isn't enabled universally yet. |
| 175 if (!compositor_task_runner_.get()) | 174 if (!compositor_task_runner_.get()) |
| 176 compositor_task_runner_ = base::MessageLoopProxy::current(); | 175 compositor_task_runner_ = base::MessageLoopProxy::current(); |
| 177 | 176 |
| 178 media_log_->AddEvent( | 177 media_log_->AddEvent( |
| 179 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED)); | 178 media_log_->CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_CREATED)); |
| 180 | 179 |
| 181 // |gpu_factories_| requires that its entry points be called on its | 180 // |gpu_factories_| requires that its entry points be called on its |
| 182 // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the | 181 // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the |
| 183 // factories, require that their message loops are identical. | 182 // factories, require that their message loops are identical. |
| 184 DCHECK(!gpu_factories_.get() || | 183 DCHECK(!gpu_factories_.get() || |
| 185 (gpu_factories_->GetTaskRunner() == media_task_runner_.get())); | 184 (gpu_factories_->GetTaskRunner() == media_task_runner_.get())); |
| 186 | 185 |
| 187 // Use the null sink if no sink was provided. | 186 // Use the null sink if no sink was provided. |
| 188 audio_source_provider_ = new WebAudioSourceProviderImpl( | 187 audio_source_provider_ = new WebAudioSourceProviderImpl( |
| 189 params.audio_renderer_sink().get() | 188 params.audio_renderer_sink().get() |
| 190 ? params.audio_renderer_sink() | 189 ? params.audio_renderer_sink() |
| 191 : new media::NullAudioSink(media_task_runner_)); | 190 : new NullAudioSink(media_task_runner_)); |
| 192 } | 191 } |
| 193 | 192 |
| 194 WebMediaPlayerImpl::~WebMediaPlayerImpl() { | 193 WebMediaPlayerImpl::~WebMediaPlayerImpl() { |
| 195 client_->setWebLayer(NULL); | 194 client_->setWebLayer(NULL); |
| 196 | 195 |
| 197 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 196 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 198 media_log_->AddEvent( | 197 media_log_->AddEvent( |
| 199 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED)); | 198 media_log_->CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_DESTROYED)); |
| 200 | 199 |
| 201 if (delegate_.get()) | 200 if (delegate_.get()) |
| 202 delegate_->PlayerGone(this); | 201 delegate_->PlayerGone(this); |
| 203 | 202 |
| 204 // Abort any pending IO so stopping the pipeline doesn't get blocked. | 203 // Abort any pending IO so stopping the pipeline doesn't get blocked. |
| 205 if (data_source_) | 204 if (data_source_) |
| 206 data_source_->Abort(); | 205 data_source_->Abort(); |
| 207 if (chunk_demuxer_) { | 206 if (chunk_demuxer_) { |
| 208 chunk_demuxer_->Shutdown(); | 207 chunk_demuxer_->Shutdown(); |
| 209 chunk_demuxer_ = NULL; | 208 chunk_demuxer_ = NULL; |
| (...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 273 | 272 |
| 274 void WebMediaPlayerImpl::play() { | 273 void WebMediaPlayerImpl::play() { |
| 275 DVLOG(1) << __FUNCTION__; | 274 DVLOG(1) << __FUNCTION__; |
| 276 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 275 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 277 | 276 |
| 278 paused_ = false; | 277 paused_ = false; |
| 279 pipeline_.SetPlaybackRate(playback_rate_); | 278 pipeline_.SetPlaybackRate(playback_rate_); |
| 280 if (data_source_) | 279 if (data_source_) |
| 281 data_source_->MediaIsPlaying(); | 280 data_source_->MediaIsPlaying(); |
| 282 | 281 |
| 283 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY)); | 282 media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PLAY)); |
| 284 | 283 |
| 285 if (delegate_.get()) | 284 if (delegate_.get()) |
| 286 delegate_->DidPlay(this); | 285 delegate_->DidPlay(this); |
| 287 } | 286 } |
| 288 | 287 |
| 289 void WebMediaPlayerImpl::pause() { | 288 void WebMediaPlayerImpl::pause() { |
| 290 DVLOG(1) << __FUNCTION__; | 289 DVLOG(1) << __FUNCTION__; |
| 291 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 290 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 292 | 291 |
| 293 paused_ = true; | 292 paused_ = true; |
| 294 pipeline_.SetPlaybackRate(0.0f); | 293 pipeline_.SetPlaybackRate(0.0f); |
| 295 if (data_source_) | 294 if (data_source_) |
| 296 data_source_->MediaIsPaused(); | 295 data_source_->MediaIsPaused(); |
| 297 paused_time_ = pipeline_.GetMediaTime(); | 296 paused_time_ = pipeline_.GetMediaTime(); |
| 298 | 297 |
| 299 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE)); | 298 media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PAUSE)); |
| 300 | 299 |
| 301 if (delegate_.get()) | 300 if (delegate_.get()) |
| 302 delegate_->DidPause(this); | 301 delegate_->DidPause(this); |
| 303 } | 302 } |
| 304 | 303 |
| 305 bool WebMediaPlayerImpl::supportsSave() const { | 304 bool WebMediaPlayerImpl::supportsSave() const { |
| 306 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 305 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 307 return supports_save_; | 306 return supports_save_; |
| 308 } | 307 } |
| 309 | 308 |
| (...skipping 152 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 462 } | 461 } |
| 463 | 462 |
| 464 WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const { | 463 WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const { |
| 465 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 464 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 466 return ready_state_; | 465 return ready_state_; |
| 467 } | 466 } |
| 468 | 467 |
| 469 blink::WebTimeRanges WebMediaPlayerImpl::buffered() const { | 468 blink::WebTimeRanges WebMediaPlayerImpl::buffered() const { |
| 470 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 469 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 471 | 470 |
| 472 media::Ranges<base::TimeDelta> buffered_time_ranges = | 471 Ranges<base::TimeDelta> buffered_time_ranges = |
| 473 pipeline_.GetBufferedTimeRanges(); | 472 pipeline_.GetBufferedTimeRanges(); |
| 474 | 473 |
| 475 const base::TimeDelta duration = pipeline_.GetMediaDuration(); | 474 const base::TimeDelta duration = pipeline_.GetMediaDuration(); |
| 476 if (duration != media::kInfiniteDuration()) { | 475 if (duration != kInfiniteDuration()) { |
| 477 buffered_data_source_host_.AddBufferedTimeRanges( | 476 buffered_data_source_host_.AddBufferedTimeRanges( |
| 478 &buffered_time_ranges, duration); | 477 &buffered_time_ranges, duration); |
| 479 } | 478 } |
| 480 return ConvertToWebTimeRanges(buffered_time_ranges); | 479 return ConvertToWebTimeRanges(buffered_time_ranges); |
| 481 } | 480 } |
| 482 | 481 |
| 483 double WebMediaPlayerImpl::maxTimeSeekable() const { | 482 double WebMediaPlayerImpl::maxTimeSeekable() const { |
| 484 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 483 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 485 | 484 |
| 486 // If we haven't even gotten to ReadyStateHaveMetadata yet then just | 485 // If we haven't even gotten to ReadyStateHaveMetadata yet then just |
| (...skipping 25 matching lines...) Expand all Loading... |
| 512 unsigned char alpha, | 511 unsigned char alpha, |
| 513 SkXfermode::Mode mode) { | 512 SkXfermode::Mode mode) { |
| 514 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 513 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 515 TRACE_EVENT0("media", "WebMediaPlayerImpl:paint"); | 514 TRACE_EVENT0("media", "WebMediaPlayerImpl:paint"); |
| 516 | 515 |
| 517 // TODO(scherkus): Clarify paint() API contract to better understand when and | 516 // TODO(scherkus): Clarify paint() API contract to better understand when and |
| 518 // why it's being called. For example, today paint() is called when: | 517 // why it's being called. For example, today paint() is called when: |
| 519 // - We haven't reached HAVE_CURRENT_DATA and need to paint black | 518 // - We haven't reached HAVE_CURRENT_DATA and need to paint black |
| 520 // - We're painting to a canvas | 519 // - We're painting to a canvas |
| 521 // See http://crbug.com/341225 http://crbug.com/342621 for details. | 520 // See http://crbug.com/341225 http://crbug.com/342621 for details. |
| 522 scoped_refptr<media::VideoFrame> video_frame = | 521 scoped_refptr<VideoFrame> video_frame = |
| 523 GetCurrentFrameFromCompositor(); | 522 GetCurrentFrameFromCompositor(); |
| 524 | 523 |
| 525 gfx::Rect gfx_rect(rect); | 524 gfx::Rect gfx_rect(rect); |
| 526 | 525 |
| 527 skcanvas_video_renderer_.Paint(video_frame.get(), | 526 skcanvas_video_renderer_.Paint(video_frame.get(), |
| 528 canvas, | 527 canvas, |
| 529 gfx_rect, | 528 gfx_rect, |
| 530 alpha, | 529 alpha, |
| 531 mode, | 530 mode, |
| 532 pipeline_metadata_.video_rotation); | 531 pipeline_metadata_.video_rotation); |
| (...skipping 11 matching lines...) Expand all Loading... |
| 544 return false; | 543 return false; |
| 545 } | 544 } |
| 546 | 545 |
| 547 double WebMediaPlayerImpl::mediaTimeForTimeValue(double timeValue) const { | 546 double WebMediaPlayerImpl::mediaTimeForTimeValue(double timeValue) const { |
| 548 return ConvertSecondsToTimestamp(timeValue).InSecondsF(); | 547 return ConvertSecondsToTimestamp(timeValue).InSecondsF(); |
| 549 } | 548 } |
| 550 | 549 |
| 551 unsigned WebMediaPlayerImpl::decodedFrameCount() const { | 550 unsigned WebMediaPlayerImpl::decodedFrameCount() const { |
| 552 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 551 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 553 | 552 |
| 554 media::PipelineStatistics stats = pipeline_.GetStatistics(); | 553 PipelineStatistics stats = pipeline_.GetStatistics(); |
| 555 return stats.video_frames_decoded; | 554 return stats.video_frames_decoded; |
| 556 } | 555 } |
| 557 | 556 |
| 558 unsigned WebMediaPlayerImpl::droppedFrameCount() const { | 557 unsigned WebMediaPlayerImpl::droppedFrameCount() const { |
| 559 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 558 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 560 | 559 |
| 561 media::PipelineStatistics stats = pipeline_.GetStatistics(); | 560 PipelineStatistics stats = pipeline_.GetStatistics(); |
| 562 return stats.video_frames_dropped; | 561 return stats.video_frames_dropped; |
| 563 } | 562 } |
| 564 | 563 |
| 565 unsigned WebMediaPlayerImpl::audioDecodedByteCount() const { | 564 unsigned WebMediaPlayerImpl::audioDecodedByteCount() const { |
| 566 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 565 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 567 | 566 |
| 568 media::PipelineStatistics stats = pipeline_.GetStatistics(); | 567 PipelineStatistics stats = pipeline_.GetStatistics(); |
| 569 return stats.audio_bytes_decoded; | 568 return stats.audio_bytes_decoded; |
| 570 } | 569 } |
| 571 | 570 |
| 572 unsigned WebMediaPlayerImpl::videoDecodedByteCount() const { | 571 unsigned WebMediaPlayerImpl::videoDecodedByteCount() const { |
| 573 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 572 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 574 | 573 |
| 575 media::PipelineStatistics stats = pipeline_.GetStatistics(); | 574 PipelineStatistics stats = pipeline_.GetStatistics(); |
| 576 return stats.video_bytes_decoded; | 575 return stats.video_bytes_decoded; |
| 577 } | 576 } |
| 578 | 577 |
| 579 bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture( | 578 bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture( |
| 580 blink::WebGraphicsContext3D* web_graphics_context, | 579 blink::WebGraphicsContext3D* web_graphics_context, |
| 581 unsigned int texture, | 580 unsigned int texture, |
| 582 unsigned int level, | 581 unsigned int level, |
| 583 unsigned int internal_format, | 582 unsigned int internal_format, |
| 584 unsigned int type, | 583 unsigned int type, |
| 585 bool premultiply_alpha, | 584 bool premultiply_alpha, |
| 586 bool flip_y) { | 585 bool flip_y) { |
| 587 TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture"); | 586 TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture"); |
| 588 | 587 |
| 589 scoped_refptr<media::VideoFrame> video_frame = | 588 scoped_refptr<VideoFrame> video_frame = |
| 590 GetCurrentFrameFromCompositor(); | 589 GetCurrentFrameFromCompositor(); |
| 591 | 590 |
| 592 if (!video_frame.get()) | 591 if (!video_frame.get()) |
| 593 return false; | 592 return false; |
| 594 if (video_frame->format() != media::VideoFrame::NATIVE_TEXTURE) | 593 if (video_frame->format() != VideoFrame::NATIVE_TEXTURE) |
| 595 return false; | 594 return false; |
| 596 | 595 |
| 597 const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder(); | 596 const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder(); |
| 598 if (mailbox_holder->texture_target != GL_TEXTURE_2D) | 597 if (mailbox_holder->texture_target != GL_TEXTURE_2D) |
| 599 return false; | 598 return false; |
| 600 | 599 |
| 601 web_graphics_context->waitSyncPoint(mailbox_holder->sync_point); | 600 web_graphics_context->waitSyncPoint(mailbox_holder->sync_point); |
| 602 uint32 source_texture = web_graphics_context->createAndConsumeTextureCHROMIUM( | 601 uint32 source_texture = web_graphics_context->createAndConsumeTextureCHROMIUM( |
| 603 GL_TEXTURE_2D, mailbox_holder->mailbox.name); | 602 GL_TEXTURE_2D, mailbox_holder->mailbox.name); |
| 604 | 603 |
| (...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 686 PipelineStatus status) { | 685 PipelineStatus status) { |
| 687 DVLOG(1) << __FUNCTION__ << "(" << time_changed << ", " << status << ")"; | 686 DVLOG(1) << __FUNCTION__ << "(" << time_changed << ", " << status << ")"; |
| 688 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 687 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 689 seeking_ = false; | 688 seeking_ = false; |
| 690 if (pending_seek_) { | 689 if (pending_seek_) { |
| 691 pending_seek_ = false; | 690 pending_seek_ = false; |
| 692 seek(pending_seek_seconds_); | 691 seek(pending_seek_seconds_); |
| 693 return; | 692 return; |
| 694 } | 693 } |
| 695 | 694 |
| 696 if (status != media::PIPELINE_OK) { | 695 if (status != PIPELINE_OK) { |
| 697 OnPipelineError(status); | 696 OnPipelineError(status); |
| 698 return; | 697 return; |
| 699 } | 698 } |
| 700 | 699 |
| 701 // Update our paused time. | 700 // Update our paused time. |
| 702 if (paused_) | 701 if (paused_) |
| 703 paused_time_ = pipeline_.GetMediaTime(); | 702 paused_time_ = pipeline_.GetMediaTime(); |
| 704 | 703 |
| 705 should_notify_time_changed_ = time_changed; | 704 should_notify_time_changed_ = time_changed; |
| 706 } | 705 } |
| 707 | 706 |
| 708 void WebMediaPlayerImpl::OnPipelineEnded() { | 707 void WebMediaPlayerImpl::OnPipelineEnded() { |
| 709 DVLOG(1) << __FUNCTION__; | 708 DVLOG(1) << __FUNCTION__; |
| 710 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 709 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 711 | 710 |
| 712 // Ignore state changes until we've completed all outstanding seeks. | 711 // Ignore state changes until we've completed all outstanding seeks. |
| 713 if (seeking_ || pending_seek_) | 712 if (seeking_ || pending_seek_) |
| 714 return; | 713 return; |
| 715 | 714 |
| 716 ended_ = true; | 715 ended_ = true; |
| 717 client_->timeChanged(); | 716 client_->timeChanged(); |
| 718 } | 717 } |
| 719 | 718 |
| 720 void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) { | 719 void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) { |
| 721 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 720 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 722 DCHECK_NE(error, media::PIPELINE_OK); | 721 DCHECK_NE(error, PIPELINE_OK); |
| 723 | 722 |
| 724 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) { | 723 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) { |
| 725 // Any error that occurs before reaching ReadyStateHaveMetadata should | 724 // Any error that occurs before reaching ReadyStateHaveMetadata should |
| 726 // be considered a format error. | 725 // be considered a format error. |
| 727 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); | 726 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); |
| 728 return; | 727 return; |
| 729 } | 728 } |
| 730 | 729 |
| 731 SetNetworkState(PipelineErrorToNetworkState(error)); | 730 SetNetworkState(PipelineErrorToNetworkState(error)); |
| 732 | 731 |
| 733 if (error == media::PIPELINE_ERROR_DECRYPT) | 732 if (error == PIPELINE_ERROR_DECRYPT) |
| 734 encrypted_media_support_->OnPipelineDecryptError(); | 733 encrypted_media_support_->OnPipelineDecryptError(); |
| 735 } | 734 } |
| 736 | 735 |
| 737 void WebMediaPlayerImpl::OnPipelineMetadata( | 736 void WebMediaPlayerImpl::OnPipelineMetadata( |
| 738 media::PipelineMetadata metadata) { | 737 PipelineMetadata metadata) { |
| 739 DVLOG(1) << __FUNCTION__; | 738 DVLOG(1) << __FUNCTION__; |
| 740 | 739 |
| 741 pipeline_metadata_ = metadata; | 740 pipeline_metadata_ = metadata; |
| 742 | 741 |
| 743 UMA_HISTOGRAM_ENUMERATION("Media.VideoRotation", | 742 UMA_HISTOGRAM_ENUMERATION("Media.VideoRotation", |
| 744 metadata.video_rotation, | 743 metadata.video_rotation, |
| 745 media::VIDEO_ROTATION_MAX + 1); | 744 VIDEO_ROTATION_MAX + 1); |
| 746 SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); | 745 SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); |
| 747 | 746 |
| 748 if (hasVideo()) { | 747 if (hasVideo()) { |
| 749 DCHECK(!video_weblayer_); | 748 DCHECK(!video_weblayer_); |
| 750 scoped_refptr<cc::VideoLayer> layer = | 749 scoped_refptr<cc::VideoLayer> layer = |
| 751 cc::VideoLayer::Create(compositor_, pipeline_metadata_.video_rotation); | 750 cc::VideoLayer::Create(compositor_, pipeline_metadata_.video_rotation); |
| 752 | 751 |
| 753 if (pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_90 || | 752 if (pipeline_metadata_.video_rotation == VIDEO_ROTATION_90 || |
| 754 pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_270) { | 753 pipeline_metadata_.video_rotation == VIDEO_ROTATION_270) { |
| 755 gfx::Size size = pipeline_metadata_.natural_size; | 754 gfx::Size size = pipeline_metadata_.natural_size; |
| 756 pipeline_metadata_.natural_size = gfx::Size(size.height(), size.width()); | 755 pipeline_metadata_.natural_size = gfx::Size(size.height(), size.width()); |
| 757 } | 756 } |
| 758 | 757 |
| 759 video_weblayer_.reset(new cc_blink::WebLayerImpl(layer)); | 758 video_weblayer_.reset(new cc_blink::WebLayerImpl(layer)); |
| 760 video_weblayer_->setOpaque(opaque_); | 759 video_weblayer_->setOpaque(opaque_); |
| 761 client_->setWebLayer(video_weblayer_.get()); | 760 client_->setWebLayer(video_weblayer_.get()); |
| 762 } | 761 } |
| 763 } | 762 } |
| 764 | 763 |
| 765 void WebMediaPlayerImpl::OnPipelineBufferingStateChanged( | 764 void WebMediaPlayerImpl::OnPipelineBufferingStateChanged( |
| 766 media::BufferingState buffering_state) { | 765 BufferingState buffering_state) { |
| 767 DVLOG(1) << __FUNCTION__ << "(" << buffering_state << ")"; | 766 DVLOG(1) << __FUNCTION__ << "(" << buffering_state << ")"; |
| 768 | 767 |
| 769 // Ignore buffering state changes until we've completed all outstanding seeks. | 768 // Ignore buffering state changes until we've completed all outstanding seeks. |
| 770 if (seeking_ || pending_seek_) | 769 if (seeking_ || pending_seek_) |
| 771 return; | 770 return; |
| 772 | 771 |
| 773 // TODO(scherkus): Handle other buffering states when Pipeline starts using | 772 // TODO(scherkus): Handle other buffering states when Pipeline starts using |
| 774 // them and translate them ready state changes http://crbug.com/144683 | 773 // them and translate them ready state changes http://crbug.com/144683 |
| 775 DCHECK_EQ(buffering_state, media::BUFFERING_HAVE_ENOUGH); | 774 DCHECK_EQ(buffering_state, BUFFERING_HAVE_ENOUGH); |
| 776 SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData); | 775 SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData); |
| 777 | 776 |
| 778 // Blink expects a timeChanged() in response to a seek(). | 777 // Blink expects a timeChanged() in response to a seek(). |
| 779 if (should_notify_time_changed_) | 778 if (should_notify_time_changed_) |
| 780 client_->timeChanged(); | 779 client_->timeChanged(); |
| 781 } | 780 } |
| 782 | 781 |
| 783 void WebMediaPlayerImpl::OnDemuxerOpened() { | 782 void WebMediaPlayerImpl::OnDemuxerOpened() { |
| 784 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 783 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 785 client_->mediaSourceOpened(new WebMediaSourceImpl( | 784 client_->mediaSourceOpened(new WebMediaSourceImpl( |
| 786 chunk_demuxer_, base::Bind(&LogMediaSourceError, media_log_))); | 785 chunk_demuxer_, base::Bind(&LogMediaSourceError, media_log_))); |
| 787 } | 786 } |
| 788 | 787 |
| 789 void WebMediaPlayerImpl::OnAddTextTrack( | 788 void WebMediaPlayerImpl::OnAddTextTrack( |
| 790 const media::TextTrackConfig& config, | 789 const TextTrackConfig& config, |
| 791 const media::AddTextTrackDoneCB& done_cb) { | 790 const AddTextTrackDoneCB& done_cb) { |
| 792 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 791 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 793 | 792 |
| 794 const WebInbandTextTrackImpl::Kind web_kind = | 793 const WebInbandTextTrackImpl::Kind web_kind = |
| 795 static_cast<WebInbandTextTrackImpl::Kind>(config.kind()); | 794 static_cast<WebInbandTextTrackImpl::Kind>(config.kind()); |
| 796 const blink::WebString web_label = | 795 const blink::WebString web_label = |
| 797 blink::WebString::fromUTF8(config.label()); | 796 blink::WebString::fromUTF8(config.label()); |
| 798 const blink::WebString web_language = | 797 const blink::WebString web_language = |
| 799 blink::WebString::fromUTF8(config.language()); | 798 blink::WebString::fromUTF8(config.language()); |
| 800 const blink::WebString web_id = | 799 const blink::WebString web_id = |
| 801 blink::WebString::fromUTF8(config.id()); | 800 blink::WebString::fromUTF8(config.id()); |
| 802 | 801 |
| 803 scoped_ptr<WebInbandTextTrackImpl> web_inband_text_track( | 802 scoped_ptr<WebInbandTextTrackImpl> web_inband_text_track( |
| 804 new WebInbandTextTrackImpl(web_kind, web_label, web_language, web_id, | 803 new WebInbandTextTrackImpl(web_kind, web_label, web_language, web_id, |
| 805 text_track_index_++)); | 804 text_track_index_++)); |
| 806 | 805 |
| 807 scoped_ptr<media::TextTrack> text_track(new TextTrackImpl( | 806 scoped_ptr<TextTrack> text_track(new TextTrackImpl( |
| 808 main_task_runner_, client_, web_inband_text_track.Pass())); | 807 main_task_runner_, client_, web_inband_text_track.Pass())); |
| 809 | 808 |
| 810 done_cb.Run(text_track.Pass()); | 809 done_cb.Run(text_track.Pass()); |
| 811 } | 810 } |
| 812 | 811 |
| 813 void WebMediaPlayerImpl::DataSourceInitialized(bool success) { | 812 void WebMediaPlayerImpl::DataSourceInitialized(bool success) { |
| 814 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 813 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 815 | 814 |
| 816 if (!success) { | 815 if (!success) { |
| 817 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); | 816 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); |
| 818 return; | 817 return; |
| 819 } | 818 } |
| 820 | 819 |
| 821 StartPipeline(); | 820 StartPipeline(); |
| 822 } | 821 } |
| 823 | 822 |
| 824 void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) { | 823 void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) { |
| 825 if (!is_downloading && network_state_ == WebMediaPlayer::NetworkStateLoading) | 824 if (!is_downloading && network_state_ == WebMediaPlayer::NetworkStateLoading) |
| 826 SetNetworkState(WebMediaPlayer::NetworkStateIdle); | 825 SetNetworkState(WebMediaPlayer::NetworkStateIdle); |
| 827 else if (is_downloading && network_state_ == WebMediaPlayer::NetworkStateIdle) | 826 else if (is_downloading && network_state_ == WebMediaPlayer::NetworkStateIdle) |
| 828 SetNetworkState(WebMediaPlayer::NetworkStateLoading); | 827 SetNetworkState(WebMediaPlayer::NetworkStateLoading); |
| 829 media_log_->AddEvent( | 828 media_log_->AddEvent( |
| 830 media_log_->CreateBooleanEvent( | 829 media_log_->CreateBooleanEvent( |
| 831 media::MediaLogEvent::NETWORK_ACTIVITY_SET, | 830 MediaLogEvent::NETWORK_ACTIVITY_SET, |
| 832 "is_downloading_data", is_downloading)); | 831 "is_downloading_data", is_downloading)); |
| 833 } | 832 } |
| 834 | 833 |
| 835 // TODO(xhwang): Move this to a factory class so that we can create different | 834 // TODO(xhwang): Move this to a factory class so that we can create different |
| 836 // renderers. | 835 // renderers. |
| 837 scoped_ptr<media::Renderer> WebMediaPlayerImpl::CreateRenderer() { | 836 scoped_ptr<Renderer> WebMediaPlayerImpl::CreateRenderer() { |
| 838 media::SetDecryptorReadyCB set_decryptor_ready_cb = | 837 SetDecryptorReadyCB set_decryptor_ready_cb = |
| 839 encrypted_media_support_->CreateSetDecryptorReadyCB(); | 838 encrypted_media_support_->CreateSetDecryptorReadyCB(); |
| 840 | 839 |
| 841 // Create our audio decoders and renderer. | 840 // Create our audio decoders and renderer. |
| 842 ScopedVector<media::AudioDecoder> audio_decoders; | 841 ScopedVector<AudioDecoder> audio_decoders; |
| 843 | 842 |
| 844 media::LogCB log_cb = base::Bind(&LogMediaSourceError, media_log_); | 843 LogCB log_cb = base::Bind(&LogMediaSourceError, media_log_); |
| 845 audio_decoders.push_back(new media::FFmpegAudioDecoder(media_task_runner_, | 844 audio_decoders.push_back(new FFmpegAudioDecoder(media_task_runner_, log_cb)); |
| 846 log_cb)); | 845 audio_decoders.push_back(new OpusAudioDecoder(media_task_runner_)); |
| 847 audio_decoders.push_back(new media::OpusAudioDecoder(media_task_runner_)); | |
| 848 | 846 |
| 849 scoped_ptr<media::AudioRenderer> audio_renderer(new media::AudioRendererImpl( | 847 scoped_ptr<AudioRenderer> audio_renderer(new AudioRendererImpl( |
| 850 media_task_runner_, | 848 media_task_runner_, |
| 851 audio_source_provider_.get(), | 849 audio_source_provider_.get(), |
| 852 audio_decoders.Pass(), | 850 audio_decoders.Pass(), |
| 853 set_decryptor_ready_cb, | 851 set_decryptor_ready_cb, |
| 854 audio_hardware_config_)); | 852 audio_hardware_config_)); |
| 855 | 853 |
| 856 // Create our video decoders and renderer. | 854 // Create our video decoders and renderer. |
| 857 ScopedVector<media::VideoDecoder> video_decoders; | 855 ScopedVector<VideoDecoder> video_decoders; |
| 858 | 856 |
| 859 if (gpu_factories_.get()) { | 857 if (gpu_factories_.get()) { |
| 860 video_decoders.push_back( | 858 video_decoders.push_back( |
| 861 new media::GpuVideoDecoder(gpu_factories_, media_log_)); | 859 new GpuVideoDecoder(gpu_factories_, media_log_)); |
| 862 } | 860 } |
| 863 | 861 |
| 864 #if !defined(MEDIA_DISABLE_LIBVPX) | 862 #if !defined(MEDIA_DISABLE_LIBVPX) |
| 865 video_decoders.push_back(new media::VpxVideoDecoder(media_task_runner_)); | 863 video_decoders.push_back(new VpxVideoDecoder(media_task_runner_)); |
| 866 #endif // !defined(MEDIA_DISABLE_LIBVPX) | 864 #endif // !defined(MEDIA_DISABLE_LIBVPX) |
| 867 | 865 |
| 868 video_decoders.push_back(new media::FFmpegVideoDecoder(media_task_runner_)); | 866 video_decoders.push_back(new FFmpegVideoDecoder(media_task_runner_)); |
| 869 | 867 |
| 870 scoped_ptr<media::VideoRenderer> video_renderer( | 868 scoped_ptr<VideoRenderer> video_renderer( |
| 871 new media::VideoRendererImpl( | 869 new VideoRendererImpl( |
| 872 media_task_runner_, | 870 media_task_runner_, |
| 873 video_decoders.Pass(), | 871 video_decoders.Pass(), |
| 874 set_decryptor_ready_cb, | 872 set_decryptor_ready_cb, |
| 875 base::Bind(&WebMediaPlayerImpl::FrameReady, base::Unretained(this)), | 873 base::Bind(&WebMediaPlayerImpl::FrameReady, base::Unretained(this)), |
| 876 true)); | 874 true)); |
| 877 | 875 |
| 878 // Create renderer. | 876 // Create renderer. |
| 879 return scoped_ptr<media::Renderer>(new media::RendererImpl( | 877 return scoped_ptr<Renderer>(new RendererImpl( |
| 880 media_task_runner_, | 878 media_task_runner_, |
| 881 demuxer_.get(), | 879 demuxer_.get(), |
| 882 audio_renderer.Pass(), | 880 audio_renderer.Pass(), |
| 883 video_renderer.Pass())); | 881 video_renderer.Pass())); |
| 884 } | 882 } |
| 885 | 883 |
| 886 void WebMediaPlayerImpl::StartPipeline() { | 884 void WebMediaPlayerImpl::StartPipeline() { |
| 887 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 885 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 888 | 886 |
| 889 // Keep track if this is a MSE or non-MSE playback. | 887 // Keep track if this is a MSE or non-MSE playback. |
| 890 UMA_HISTOGRAM_BOOLEAN("Media.MSE.Playback", | 888 UMA_HISTOGRAM_BOOLEAN("Media.MSE.Playback", |
| 891 (load_type_ == LoadTypeMediaSource)); | 889 (load_type_ == LoadTypeMediaSource)); |
| 892 | 890 |
| 893 media::LogCB mse_log_cb; | 891 LogCB mse_log_cb; |
| 894 media::Demuxer::NeedKeyCB need_key_cb = | 892 Demuxer::NeedKeyCB need_key_cb = |
| 895 encrypted_media_support_->CreateNeedKeyCB(); | 893 encrypted_media_support_->CreateNeedKeyCB(); |
| 896 | 894 |
| 897 // Figure out which demuxer to use. | 895 // Figure out which demuxer to use. |
| 898 if (load_type_ != LoadTypeMediaSource) { | 896 if (load_type_ != LoadTypeMediaSource) { |
| 899 DCHECK(!chunk_demuxer_); | 897 DCHECK(!chunk_demuxer_); |
| 900 DCHECK(data_source_); | 898 DCHECK(data_source_); |
| 901 | 899 |
| 902 demuxer_.reset(new media::FFmpegDemuxer( | 900 demuxer_.reset(new FFmpegDemuxer( |
| 903 media_task_runner_, data_source_.get(), | 901 media_task_runner_, data_source_.get(), |
| 904 need_key_cb, | 902 need_key_cb, |
| 905 media_log_)); | 903 media_log_)); |
| 906 } else { | 904 } else { |
| 907 DCHECK(!chunk_demuxer_); | 905 DCHECK(!chunk_demuxer_); |
| 908 DCHECK(!data_source_); | 906 DCHECK(!data_source_); |
| 909 | 907 |
| 910 mse_log_cb = base::Bind(&LogMediaSourceError, media_log_); | 908 mse_log_cb = base::Bind(&LogMediaSourceError, media_log_); |
| 911 | 909 |
| 912 chunk_demuxer_ = new media::ChunkDemuxer( | 910 chunk_demuxer_ = new ChunkDemuxer( |
| 913 BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDemuxerOpened), | 911 BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDemuxerOpened), |
| 914 need_key_cb, | 912 need_key_cb, |
| 915 mse_log_cb, | 913 mse_log_cb, |
| 916 true); | 914 true); |
| 917 demuxer_.reset(chunk_demuxer_); | 915 demuxer_.reset(chunk_demuxer_); |
| 918 } | 916 } |
| 919 | 917 |
| 920 // ... and we're ready to go! | 918 // ... and we're ready to go! |
| 921 seeking_ = true; | 919 seeking_ = true; |
| 922 pipeline_.Start( | 920 pipeline_.Start( |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 955 | 953 |
// blink::WebMediaPlayer interface. Returns a non-owning pointer to this
// player's audio source provider; |audio_source_provider_| retains ownership.
blink::WebAudioSourceProvider* WebMediaPlayerImpl::audioSourceProvider() {
  return audio_source_provider_.get();
}
| 959 | 957 |
| 960 double WebMediaPlayerImpl::GetPipelineDuration() const { | 958 double WebMediaPlayerImpl::GetPipelineDuration() const { |
| 961 base::TimeDelta duration = pipeline_.GetMediaDuration(); | 959 base::TimeDelta duration = pipeline_.GetMediaDuration(); |
| 962 | 960 |
| 963 // Return positive infinity if the resource is unbounded. | 961 // Return positive infinity if the resource is unbounded. |
| 964 // http://www.whatwg.org/specs/web-apps/current-work/multipage/video.html#dom-
media-duration | 962 // http://www.whatwg.org/specs/web-apps/current-work/multipage/video.html#dom-
media-duration |
| 965 if (duration == media::kInfiniteDuration()) | 963 if (duration == kInfiniteDuration()) |
| 966 return std::numeric_limits<double>::infinity(); | 964 return std::numeric_limits<double>::infinity(); |
| 967 | 965 |
| 968 return duration.InSecondsF(); | 966 return duration.InSecondsF(); |
| 969 } | 967 } |
| 970 | 968 |
| 971 void WebMediaPlayerImpl::OnDurationChanged() { | 969 void WebMediaPlayerImpl::OnDurationChanged() { |
| 972 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) | 970 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) |
| 973 return; | 971 return; |
| 974 | 972 |
| 975 client_->durationChanged(); | 973 client_->durationChanged(); |
| (...skipping 14 matching lines...) Expand all Loading... |
| 990 void WebMediaPlayerImpl::OnOpacityChanged(bool opaque) { | 988 void WebMediaPlayerImpl::OnOpacityChanged(bool opaque) { |
| 991 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 989 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| 992 DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing); | 990 DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing); |
| 993 | 991 |
| 994 opaque_ = opaque; | 992 opaque_ = opaque; |
| 995 if (video_weblayer_) | 993 if (video_weblayer_) |
| 996 video_weblayer_->setOpaque(opaque_); | 994 video_weblayer_->setOpaque(opaque_); |
| 997 } | 995 } |
| 998 | 996 |
| 999 void WebMediaPlayerImpl::FrameReady( | 997 void WebMediaPlayerImpl::FrameReady( |
| 1000 const scoped_refptr<media::VideoFrame>& frame) { | 998 const scoped_refptr<VideoFrame>& frame) { |
| 1001 compositor_task_runner_->PostTask( | 999 compositor_task_runner_->PostTask( |
| 1002 FROM_HERE, | 1000 FROM_HERE, |
| 1003 base::Bind(&VideoFrameCompositor::UpdateCurrentFrame, | 1001 base::Bind(&VideoFrameCompositor::UpdateCurrentFrame, |
| 1004 base::Unretained(compositor_), | 1002 base::Unretained(compositor_), |
| 1005 frame)); | 1003 frame)); |
| 1006 } | 1004 } |
| 1007 | 1005 |
| 1008 static void GetCurrentFrameAndSignal( | 1006 static void GetCurrentFrameAndSignal( |
| 1009 VideoFrameCompositor* compositor, | 1007 VideoFrameCompositor* compositor, |
| 1010 scoped_refptr<media::VideoFrame>* video_frame_out, | 1008 scoped_refptr<VideoFrame>* video_frame_out, |
| 1011 base::WaitableEvent* event) { | 1009 base::WaitableEvent* event) { |
| 1012 TRACE_EVENT0("media", "GetCurrentFrameAndSignal"); | 1010 TRACE_EVENT0("media", "GetCurrentFrameAndSignal"); |
| 1013 *video_frame_out = compositor->GetCurrentFrame(); | 1011 *video_frame_out = compositor->GetCurrentFrame(); |
| 1014 event->Signal(); | 1012 event->Signal(); |
| 1015 } | 1013 } |
| 1016 | 1014 |
// Fetches the compositor's current video frame from any thread. When called
// off the compositor thread this blocks (event.Wait) until the compositor
// thread has produced the frame, so the stack-allocated out-params below are
// guaranteed to outlive the posted task.
scoped_refptr<VideoFrame>
WebMediaPlayerImpl::GetCurrentFrameFromCompositor() {
  TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor");
  // Fast path: already on the compositor thread, read the frame directly.
  if (compositor_task_runner_->BelongsToCurrentThread())
    return compositor_->GetCurrentFrame();

  // Use a posted task and waitable event instead of a lock otherwise
  // WebGL/Canvas can see different content than what the compositor is seeing.
  scoped_refptr<VideoFrame> video_frame;
  base::WaitableEvent event(false, false);
  compositor_task_runner_->PostTask(FROM_HERE,
                                    base::Bind(&GetCurrentFrameAndSignal,
                                               base::Unretained(compositor_),
                                               &video_frame,
                                               &event));
  event.Wait();
  return video_frame;
}
| 1035 | 1033 |
| 1036 } // namespace content | 1034 } // namespace media |
| OLD | NEW |