OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webmediaplayer_impl.h" | 5 #include "media/blink/webmediaplayer_impl.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <limits> | 8 #include <limits> |
9 #include <string> | 9 #include <string> |
10 #include <vector> | 10 #include <vector> |
11 | 11 |
12 #include "base/bind.h" | 12 #include "base/bind.h" |
13 #include "base/callback.h" | 13 #include "base/callback.h" |
14 #include "base/callback_helpers.h" | 14 #include "base/callback_helpers.h" |
15 #include "base/debug/alias.h" | 15 #include "base/debug/alias.h" |
16 #include "base/debug/crash_logging.h" | 16 #include "base/debug/crash_logging.h" |
17 #include "base/debug/trace_event.h" | 17 #include "base/debug/trace_event.h" |
18 #include "base/message_loop/message_loop_proxy.h" | 18 #include "base/message_loop/message_loop_proxy.h" |
19 #include "base/metrics/histogram.h" | 19 #include "base/metrics/histogram.h" |
20 #include "base/single_thread_task_runner.h" | 20 #include "base/single_thread_task_runner.h" |
21 #include "base/synchronization/waitable_event.h" | 21 #include "base/synchronization/waitable_event.h" |
22 #include "cc/blink/web_layer_impl.h" | 22 #include "cc/blink/web_layer_impl.h" |
23 #include "cc/layers/video_layer.h" | 23 #include "cc/layers/video_layer.h" |
24 #include "content/renderer/media/buffered_data_source.h" | |
25 #include "content/renderer/media/crypto/encrypted_media_player_support.h" | |
26 #include "content/renderer/media/texttrack_impl.h" | |
27 #include "content/renderer/media/webaudiosourceprovider_impl.h" | |
28 #include "content/renderer/media/webinbandtexttrack_impl.h" | |
29 #include "content/renderer/media/webmediaplayer_delegate.h" | |
30 #include "content/renderer/media/webmediaplayer_params.h" | |
31 #include "content/renderer/media/webmediaplayer_util.h" | |
32 #include "content/renderer/media/webmediasource_impl.h" | |
33 #include "gpu/GLES2/gl2extchromium.h" | 24 #include "gpu/GLES2/gl2extchromium.h" |
34 #include "gpu/command_buffer/common/mailbox_holder.h" | 25 #include "gpu/command_buffer/common/mailbox_holder.h" |
35 #include "media/audio/null_audio_sink.h" | 26 #include "media/audio/null_audio_sink.h" |
36 #include "media/base/audio_hardware_config.h" | 27 #include "media/base/audio_hardware_config.h" |
37 #include "media/base/bind_to_current_loop.h" | 28 #include "media/base/bind_to_current_loop.h" |
38 #include "media/base/limits.h" | 29 #include "media/base/limits.h" |
39 #include "media/base/media_log.h" | 30 #include "media/base/media_log.h" |
40 #include "media/base/pipeline.h" | 31 #include "media/base/pipeline.h" |
41 #include "media/base/text_renderer.h" | 32 #include "media/base/text_renderer.h" |
42 #include "media/base/video_frame.h" | 33 #include "media/base/video_frame.h" |
| 34 #include "media/blink/buffered_data_source.h" |
| 35 #include "media/blink/encrypted_media_player_support.h" |
| 36 #include "media/blink/texttrack_impl.h" |
| 37 #include "media/blink/webaudiosourceprovider_impl.h" |
| 38 #include "media/blink/webinbandtexttrack_impl.h" |
| 39 #include "media/blink/webmediaplayer_delegate.h" |
| 40 #include "media/blink/webmediaplayer_params.h" |
| 41 #include "media/blink/webmediaplayer_util.h" |
| 42 #include "media/blink/webmediasource_impl.h" |
43 #include "media/filters/audio_renderer_impl.h" | 43 #include "media/filters/audio_renderer_impl.h" |
44 #include "media/filters/chunk_demuxer.h" | 44 #include "media/filters/chunk_demuxer.h" |
45 #include "media/filters/ffmpeg_audio_decoder.h" | 45 #include "media/filters/ffmpeg_audio_decoder.h" |
46 #include "media/filters/ffmpeg_demuxer.h" | 46 #include "media/filters/ffmpeg_demuxer.h" |
47 #include "media/filters/ffmpeg_video_decoder.h" | 47 #include "media/filters/ffmpeg_video_decoder.h" |
48 #include "media/filters/gpu_video_accelerator_factories.h" | 48 #include "media/filters/gpu_video_accelerator_factories.h" |
49 #include "media/filters/gpu_video_decoder.h" | 49 #include "media/filters/gpu_video_decoder.h" |
50 #include "media/filters/opus_audio_decoder.h" | 50 #include "media/filters/opus_audio_decoder.h" |
51 #include "media/filters/renderer_impl.h" | 51 #include "media/filters/renderer_impl.h" |
52 #include "media/filters/video_renderer_impl.h" | 52 #include "media/filters/video_renderer_impl.h" |
53 #include "media/filters/vpx_video_decoder.h" | 53 #include "media/filters/vpx_video_decoder.h" |
54 #include "third_party/WebKit/public/platform/WebMediaSource.h" | 54 #include "third_party/WebKit/public/platform/WebMediaSource.h" |
55 #include "third_party/WebKit/public/platform/WebRect.h" | 55 #include "third_party/WebKit/public/platform/WebRect.h" |
56 #include "third_party/WebKit/public/platform/WebSize.h" | 56 #include "third_party/WebKit/public/platform/WebSize.h" |
57 #include "third_party/WebKit/public/platform/WebString.h" | 57 #include "third_party/WebKit/public/platform/WebString.h" |
58 #include "third_party/WebKit/public/platform/WebURL.h" | 58 #include "third_party/WebKit/public/platform/WebURL.h" |
59 #include "third_party/WebKit/public/web/WebLocalFrame.h" | 59 #include "third_party/WebKit/public/web/WebLocalFrame.h" |
60 #include "third_party/WebKit/public/web/WebSecurityOrigin.h" | 60 #include "third_party/WebKit/public/web/WebSecurityOrigin.h" |
61 #include "third_party/WebKit/public/web/WebView.h" | 61 #include "third_party/WebKit/public/web/WebView.h" |
62 | 62 |
63 using blink::WebCanvas; | 63 using blink::WebCanvas; |
64 using blink::WebMediaPlayer; | 64 using blink::WebMediaPlayer; |
65 using blink::WebRect; | 65 using blink::WebRect; |
66 using blink::WebSize; | 66 using blink::WebSize; |
67 using blink::WebString; | 67 using blink::WebString; |
68 using media::PipelineStatus; | |
69 | 68 |
70 namespace { | 69 namespace { |
71 | 70 |
72 // Limits the range of playback rate. | 71 // Limits the range of playback rate. |
73 // | 72 // |
74 // TODO(kylep): Revisit these. | 73 // TODO(kylep): Revisit these. |
75 // | 74 // |
76 // Vista has substantially lower performance than XP or Windows7. If you speed | 75 // Vista has substantially lower performance than XP or Windows7. If you speed |
77 // up a video too much, it can't keep up, and rendering stops updating except on | 76 // up a video too much, it can't keep up, and rendering stops updating except on |
78 // the time bar. For really high speeds, audio becomes a bottleneck and we just | 77 // the time bar. For really high speeds, audio becomes a bottleneck and we just |
(...skipping 21 matching lines...) |
100 virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE { | 99 virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE { |
101 web_graphics_context_->waitSyncPoint(sync_point); | 100 web_graphics_context_->waitSyncPoint(sync_point); |
102 } | 101 } |
103 | 102 |
104 private: | 103 private: |
105 blink::WebGraphicsContext3D* web_graphics_context_; | 104 blink::WebGraphicsContext3D* web_graphics_context_; |
106 }; | 105 }; |
107 | 106 |
108 } // namespace | 107 } // namespace |
109 | 108 |
110 namespace content { | 109 namespace media { |
111 | 110 |
112 class BufferedDataSourceHostImpl; | 111 class BufferedDataSourceHostImpl; |
113 | 112 |
114 #define COMPILE_ASSERT_MATCHING_ENUM(name) \ | 113 #define COMPILE_ASSERT_MATCHING_ENUM(name) \ |
115 COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::CORSMode ## name) == \ | 114 COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::CORSMode ## name) == \ |
116 static_cast<int>(BufferedResourceLoader::k ## name), \ | 115 static_cast<int>(BufferedResourceLoader::k ## name), \ |
117 mismatching_enums) | 116 mismatching_enums) |
118 COMPILE_ASSERT_MATCHING_ENUM(Unspecified); | 117 COMPILE_ASSERT_MATCHING_ENUM(Unspecified); |
119 COMPILE_ASSERT_MATCHING_ENUM(Anonymous); | 118 COMPILE_ASSERT_MATCHING_ENUM(Anonymous); |
120 COMPILE_ASSERT_MATCHING_ENUM(UseCredentials); | 119 COMPILE_ASSERT_MATCHING_ENUM(UseCredentials); |
121 #undef COMPILE_ASSERT_MATCHING_ENUM | 120 #undef COMPILE_ASSERT_MATCHING_ENUM |
122 | 121 |
123 #define BIND_TO_RENDER_LOOP(function) \ | 122 #define BIND_TO_RENDER_LOOP(function) \ |
124 (DCHECK(main_task_runner_->BelongsToCurrentThread()), \ | 123 (DCHECK(main_task_runner_->BelongsToCurrentThread()), \ |
125 media::BindToCurrentLoop(base::Bind(function, AsWeakPtr()))) | 124 BindToCurrentLoop(base::Bind(function, AsWeakPtr()))) |
126 | 125 |
127 #define BIND_TO_RENDER_LOOP1(function, arg1) \ | 126 #define BIND_TO_RENDER_LOOP1(function, arg1) \ |
128 (DCHECK(main_task_runner_->BelongsToCurrentThread()), \ | 127 (DCHECK(main_task_runner_->BelongsToCurrentThread()), \ |
129 media::BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1))) | 128 BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1))) |
130 | 129 |
131 static void LogMediaSourceError(const scoped_refptr<media::MediaLog>& media_log, | 130 static void LogMediaSourceError(const scoped_refptr<MediaLog>& media_log, |
132 const std::string& error) { | 131 const std::string& error) { |
133 media_log->AddEvent(media_log->CreateMediaSourceErrorEvent(error)); | 132 media_log->AddEvent(media_log->CreateMediaSourceErrorEvent(error)); |
134 } | 133 } |
135 | 134 |
136 WebMediaPlayerImpl::WebMediaPlayerImpl( | 135 WebMediaPlayerImpl::WebMediaPlayerImpl( |
137 blink::WebLocalFrame* frame, | 136 blink::WebLocalFrame* frame, |
138 blink::WebMediaPlayerClient* client, | 137 blink::WebMediaPlayerClient* client, |
139 base::WeakPtr<WebMediaPlayerDelegate> delegate, | 138 base::WeakPtr<WebMediaPlayerDelegate> delegate, |
140 const WebMediaPlayerParams& params) | 139 const WebMediaPlayerParams& params) |
141 : frame_(frame), | 140 : frame_(frame), |
(...skipping 26 matching lines...) |
168 encrypted_media_support_( | 167 encrypted_media_support_( |
169 params.CreateEncryptedMediaPlayerSupport(client)), | 168 params.CreateEncryptedMediaPlayerSupport(client)), |
170 audio_hardware_config_(params.audio_hardware_config()) { | 169 audio_hardware_config_(params.audio_hardware_config()) { |
171 DCHECK(encrypted_media_support_); | 170 DCHECK(encrypted_media_support_); |
172 | 171 |
173 // Threaded compositing isn't enabled universally yet. | 172 // Threaded compositing isn't enabled universally yet. |
174 if (!compositor_task_runner_) | 173 if (!compositor_task_runner_) |
175 compositor_task_runner_ = base::MessageLoopProxy::current(); | 174 compositor_task_runner_ = base::MessageLoopProxy::current(); |
176 | 175 |
177 media_log_->AddEvent( | 176 media_log_->AddEvent( |
178 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED)); | 177 media_log_->CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_CREATED)); |
179 | 178 |
180 // |gpu_factories_| requires that its entry points be called on its | 179 // |gpu_factories_| requires that its entry points be called on its |
181 // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the | 180 // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the |
182 // factories, require that their message loops are identical. | 181 // factories, require that their message loops are identical. |
183 DCHECK(!gpu_factories_.get() || | 182 DCHECK(!gpu_factories_.get() || |
184 (gpu_factories_->GetTaskRunner() == media_task_runner_.get())); | 183 (gpu_factories_->GetTaskRunner() == media_task_runner_.get())); |
185 | 184 |
186 // Use the null sink if no sink was provided. | 185 // Use the null sink if no sink was provided. |
187 audio_source_provider_ = new WebAudioSourceProviderImpl( | 186 audio_source_provider_ = new WebAudioSourceProviderImpl( |
188 params.audio_renderer_sink().get() | 187 params.audio_renderer_sink().get() |
189 ? params.audio_renderer_sink() | 188 ? params.audio_renderer_sink() |
190 : new media::NullAudioSink(media_task_runner_)); | 189 : new NullAudioSink(media_task_runner_)); |
191 } | 190 } |
192 | 191 |
193 WebMediaPlayerImpl::~WebMediaPlayerImpl() { | 192 WebMediaPlayerImpl::~WebMediaPlayerImpl() { |
194 client_->setWebLayer(NULL); | 193 client_->setWebLayer(NULL); |
195 | 194 |
196 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 195 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
197 media_log_->AddEvent( | 196 media_log_->AddEvent( |
198 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED)); | 197 media_log_->CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_DESTROYED)); |
199 | 198 |
200 if (delegate_.get()) | 199 if (delegate_.get()) |
201 delegate_->PlayerGone(this); | 200 delegate_->PlayerGone(this); |
202 | 201 |
203 // Abort any pending IO so stopping the pipeline doesn't get blocked. | 202 // Abort any pending IO so stopping the pipeline doesn't get blocked. |
204 if (data_source_) | 203 if (data_source_) |
205 data_source_->Abort(); | 204 data_source_->Abort(); |
206 if (chunk_demuxer_) { | 205 if (chunk_demuxer_) { |
207 chunk_demuxer_->Shutdown(); | 206 chunk_demuxer_->Shutdown(); |
208 chunk_demuxer_ = NULL; | 207 chunk_demuxer_ = NULL; |
(...skipping 63 matching lines...) |
272 | 271 |
273 void WebMediaPlayerImpl::play() { | 272 void WebMediaPlayerImpl::play() { |
274 DVLOG(1) << __FUNCTION__; | 273 DVLOG(1) << __FUNCTION__; |
275 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 274 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
276 | 275 |
277 paused_ = false; | 276 paused_ = false; |
278 pipeline_.SetPlaybackRate(playback_rate_); | 277 pipeline_.SetPlaybackRate(playback_rate_); |
279 if (data_source_) | 278 if (data_source_) |
280 data_source_->MediaIsPlaying(); | 279 data_source_->MediaIsPlaying(); |
281 | 280 |
282 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY)); | 281 media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PLAY)); |
283 | 282 |
284 if (delegate_.get()) | 283 if (delegate_.get()) |
285 delegate_->DidPlay(this); | 284 delegate_->DidPlay(this); |
286 } | 285 } |
287 | 286 |
288 void WebMediaPlayerImpl::pause() { | 287 void WebMediaPlayerImpl::pause() { |
289 DVLOG(1) << __FUNCTION__; | 288 DVLOG(1) << __FUNCTION__; |
290 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 289 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
291 | 290 |
292 paused_ = true; | 291 paused_ = true; |
293 pipeline_.SetPlaybackRate(0.0f); | 292 pipeline_.SetPlaybackRate(0.0f); |
294 if (data_source_) | 293 if (data_source_) |
295 data_source_->MediaIsPaused(); | 294 data_source_->MediaIsPaused(); |
296 paused_time_ = pipeline_.GetMediaTime(); | 295 paused_time_ = pipeline_.GetMediaTime(); |
297 | 296 |
298 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE)); | 297 media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PAUSE)); |
299 | 298 |
300 if (delegate_.get()) | 299 if (delegate_.get()) |
301 delegate_->DidPause(this); | 300 delegate_->DidPause(this); |
302 } | 301 } |
303 | 302 |
304 bool WebMediaPlayerImpl::supportsSave() const { | 303 bool WebMediaPlayerImpl::supportsSave() const { |
305 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 304 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
306 return supports_save_; | 305 return supports_save_; |
307 } | 306 } |
308 | 307 |
(...skipping 143 matching lines...) |
452 } | 451 } |
453 | 452 |
454 WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const { | 453 WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const { |
455 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 454 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
456 return ready_state_; | 455 return ready_state_; |
457 } | 456 } |
458 | 457 |
459 blink::WebTimeRanges WebMediaPlayerImpl::buffered() const { | 458 blink::WebTimeRanges WebMediaPlayerImpl::buffered() const { |
460 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 459 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
461 | 460 |
462 media::Ranges<base::TimeDelta> buffered_time_ranges = | 461 Ranges<base::TimeDelta> buffered_time_ranges = |
463 pipeline_.GetBufferedTimeRanges(); | 462 pipeline_.GetBufferedTimeRanges(); |
464 | 463 |
465 const base::TimeDelta duration = pipeline_.GetMediaDuration(); | 464 const base::TimeDelta duration = pipeline_.GetMediaDuration(); |
466 if (duration != media::kInfiniteDuration()) { | 465 if (duration != kInfiniteDuration()) { |
467 buffered_data_source_host_.AddBufferedTimeRanges( | 466 buffered_data_source_host_.AddBufferedTimeRanges( |
468 &buffered_time_ranges, duration); | 467 &buffered_time_ranges, duration); |
469 } | 468 } |
470 return ConvertToWebTimeRanges(buffered_time_ranges); | 469 return ConvertToWebTimeRanges(buffered_time_ranges); |
471 } | 470 } |
472 | 471 |
473 double WebMediaPlayerImpl::maxTimeSeekable() const { | 472 double WebMediaPlayerImpl::maxTimeSeekable() const { |
474 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 473 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
475 | 474 |
476 // If we haven't even gotten to ReadyStateHaveMetadata yet then just | 475 // If we haven't even gotten to ReadyStateHaveMetadata yet then just |
(...skipping 25 matching lines...) |
502 unsigned char alpha, | 501 unsigned char alpha, |
503 SkXfermode::Mode mode) { | 502 SkXfermode::Mode mode) { |
504 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 503 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
505 TRACE_EVENT0("media", "WebMediaPlayerImpl:paint"); | 504 TRACE_EVENT0("media", "WebMediaPlayerImpl:paint"); |
506 | 505 |
507 // TODO(scherkus): Clarify paint() API contract to better understand when and | 506 // TODO(scherkus): Clarify paint() API contract to better understand when and |
508 // why it's being called. For example, today paint() is called when: | 507 // why it's being called. For example, today paint() is called when: |
509 // - We haven't reached HAVE_CURRENT_DATA and need to paint black | 508 // - We haven't reached HAVE_CURRENT_DATA and need to paint black |
510 // - We're painting to a canvas | 509 // - We're painting to a canvas |
511 // See http://crbug.com/341225 http://crbug.com/342621 for details. | 510 // See http://crbug.com/341225 http://crbug.com/342621 for details. |
512 scoped_refptr<media::VideoFrame> video_frame = | 511 scoped_refptr<VideoFrame> video_frame = |
513 GetCurrentFrameFromCompositor(); | 512 GetCurrentFrameFromCompositor(); |
514 | 513 |
515 gfx::Rect gfx_rect(rect); | 514 gfx::Rect gfx_rect(rect); |
516 | 515 |
517 skcanvas_video_renderer_.Paint(video_frame.get(), | 516 skcanvas_video_renderer_.Paint(video_frame.get(), |
518 canvas, | 517 canvas, |
519 gfx_rect, | 518 gfx_rect, |
520 alpha, | 519 alpha, |
521 mode, | 520 mode, |
522 pipeline_metadata_.video_rotation); | 521 pipeline_metadata_.video_rotation); |
(...skipping 11 matching lines...) |
534 return false; | 533 return false; |
535 } | 534 } |
536 | 535 |
537 double WebMediaPlayerImpl::mediaTimeForTimeValue(double timeValue) const { | 536 double WebMediaPlayerImpl::mediaTimeForTimeValue(double timeValue) const { |
538 return ConvertSecondsToTimestamp(timeValue).InSecondsF(); | 537 return ConvertSecondsToTimestamp(timeValue).InSecondsF(); |
539 } | 538 } |
540 | 539 |
541 unsigned WebMediaPlayerImpl::decodedFrameCount() const { | 540 unsigned WebMediaPlayerImpl::decodedFrameCount() const { |
542 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 541 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
543 | 542 |
544 media::PipelineStatistics stats = pipeline_.GetStatistics(); | 543 PipelineStatistics stats = pipeline_.GetStatistics(); |
545 return stats.video_frames_decoded; | 544 return stats.video_frames_decoded; |
546 } | 545 } |
547 | 546 |
548 unsigned WebMediaPlayerImpl::droppedFrameCount() const { | 547 unsigned WebMediaPlayerImpl::droppedFrameCount() const { |
549 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 548 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
550 | 549 |
551 media::PipelineStatistics stats = pipeline_.GetStatistics(); | 550 PipelineStatistics stats = pipeline_.GetStatistics(); |
552 return stats.video_frames_dropped; | 551 return stats.video_frames_dropped; |
553 } | 552 } |
554 | 553 |
555 unsigned WebMediaPlayerImpl::audioDecodedByteCount() const { | 554 unsigned WebMediaPlayerImpl::audioDecodedByteCount() const { |
556 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 555 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
557 | 556 |
558 media::PipelineStatistics stats = pipeline_.GetStatistics(); | 557 PipelineStatistics stats = pipeline_.GetStatistics(); |
559 return stats.audio_bytes_decoded; | 558 return stats.audio_bytes_decoded; |
560 } | 559 } |
561 | 560 |
562 unsigned WebMediaPlayerImpl::videoDecodedByteCount() const { | 561 unsigned WebMediaPlayerImpl::videoDecodedByteCount() const { |
563 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 562 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
564 | 563 |
565 media::PipelineStatistics stats = pipeline_.GetStatistics(); | 564 PipelineStatistics stats = pipeline_.GetStatistics(); |
566 return stats.video_bytes_decoded; | 565 return stats.video_bytes_decoded; |
567 } | 566 } |
568 | 567 |
569 bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture( | 568 bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture( |
570 blink::WebGraphicsContext3D* web_graphics_context, | 569 blink::WebGraphicsContext3D* web_graphics_context, |
571 unsigned int texture, | 570 unsigned int texture, |
572 unsigned int level, | 571 unsigned int level, |
573 unsigned int internal_format, | 572 unsigned int internal_format, |
574 unsigned int type, | 573 unsigned int type, |
575 bool premultiply_alpha, | 574 bool premultiply_alpha, |
576 bool flip_y) { | 575 bool flip_y) { |
577 TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture"); | 576 TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture"); |
578 | 577 |
579 scoped_refptr<media::VideoFrame> video_frame = | 578 scoped_refptr<VideoFrame> video_frame = |
580 GetCurrentFrameFromCompositor(); | 579 GetCurrentFrameFromCompositor(); |
581 | 580 |
582 if (!video_frame.get()) | 581 if (!video_frame.get()) |
583 return false; | 582 return false; |
584 if (video_frame->format() != media::VideoFrame::NATIVE_TEXTURE) | 583 if (video_frame->format() != VideoFrame::NATIVE_TEXTURE) |
585 return false; | 584 return false; |
586 | 585 |
587 const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder(); | 586 const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder(); |
588 if (mailbox_holder->texture_target != GL_TEXTURE_2D) | 587 if (mailbox_holder->texture_target != GL_TEXTURE_2D) |
589 return false; | 588 return false; |
590 | 589 |
591 web_graphics_context->waitSyncPoint(mailbox_holder->sync_point); | 590 web_graphics_context->waitSyncPoint(mailbox_holder->sync_point); |
592 uint32 source_texture = web_graphics_context->createAndConsumeTextureCHROMIUM( | 591 uint32 source_texture = web_graphics_context->createAndConsumeTextureCHROMIUM( |
593 GL_TEXTURE_2D, mailbox_holder->mailbox.name); | 592 GL_TEXTURE_2D, mailbox_holder->mailbox.name); |
594 | 593 |
(...skipping 81 matching lines...) |
676 PipelineStatus status) { | 675 PipelineStatus status) { |
677 DVLOG(1) << __FUNCTION__ << "(" << time_changed << ", " << status << ")"; | 676 DVLOG(1) << __FUNCTION__ << "(" << time_changed << ", " << status << ")"; |
678 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 677 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
679 seeking_ = false; | 678 seeking_ = false; |
680 if (pending_seek_) { | 679 if (pending_seek_) { |
681 pending_seek_ = false; | 680 pending_seek_ = false; |
682 seek(pending_seek_seconds_); | 681 seek(pending_seek_seconds_); |
683 return; | 682 return; |
684 } | 683 } |
685 | 684 |
686 if (status != media::PIPELINE_OK) { | 685 if (status != PIPELINE_OK) { |
687 OnPipelineError(status); | 686 OnPipelineError(status); |
688 return; | 687 return; |
689 } | 688 } |
690 | 689 |
691 // Update our paused time. | 690 // Update our paused time. |
692 if (paused_) | 691 if (paused_) |
693 paused_time_ = pipeline_.GetMediaTime(); | 692 paused_time_ = pipeline_.GetMediaTime(); |
694 | 693 |
695 should_notify_time_changed_ = time_changed; | 694 should_notify_time_changed_ = time_changed; |
696 } | 695 } |
697 | 696 |
698 void WebMediaPlayerImpl::OnPipelineEnded() { | 697 void WebMediaPlayerImpl::OnPipelineEnded() { |
699 DVLOG(1) << __FUNCTION__; | 698 DVLOG(1) << __FUNCTION__; |
700 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 699 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
701 client_->timeChanged(); | 700 client_->timeChanged(); |
702 } | 701 } |
703 | 702 |
704 void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) { | 703 void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) { |
705 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 704 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
706 DCHECK_NE(error, media::PIPELINE_OK); | 705 DCHECK_NE(error, PIPELINE_OK); |
707 | 706 |
708 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) { | 707 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) { |
709 // Any error that occurs before reaching ReadyStateHaveMetadata should | 708 // Any error that occurs before reaching ReadyStateHaveMetadata should |
710 // be considered a format error. | 709 // be considered a format error. |
711 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); | 710 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); |
712 return; | 711 return; |
713 } | 712 } |
714 | 713 |
715 SetNetworkState(PipelineErrorToNetworkState(error)); | 714 SetNetworkState(PipelineErrorToNetworkState(error)); |
716 | 715 |
717 if (error == media::PIPELINE_ERROR_DECRYPT) | 716 if (error == PIPELINE_ERROR_DECRYPT) |
718 encrypted_media_support_->OnPipelineDecryptError(); | 717 encrypted_media_support_->OnPipelineDecryptError(); |
719 } | 718 } |
720 | 719 |
721 void WebMediaPlayerImpl::OnPipelineMetadata( | 720 void WebMediaPlayerImpl::OnPipelineMetadata( |
722 media::PipelineMetadata metadata) { | 721 PipelineMetadata metadata) { |
723 DVLOG(1) << __FUNCTION__; | 722 DVLOG(1) << __FUNCTION__; |
724 | 723 |
725 pipeline_metadata_ = metadata; | 724 pipeline_metadata_ = metadata; |
726 | 725 |
727 UMA_HISTOGRAM_ENUMERATION("Media.VideoRotation", | 726 UMA_HISTOGRAM_ENUMERATION("Media.VideoRotation", |
728 metadata.video_rotation, | 727 metadata.video_rotation, |
729 media::VIDEO_ROTATION_MAX + 1); | 728 VIDEO_ROTATION_MAX + 1); |
730 SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); | 729 SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); |
731 | 730 |
732 if (hasVideo()) { | 731 if (hasVideo()) { |
733 DCHECK(!video_weblayer_); | 732 DCHECK(!video_weblayer_); |
734 scoped_refptr<cc::VideoLayer> layer = | 733 scoped_refptr<cc::VideoLayer> layer = |
735 cc::VideoLayer::Create(compositor_, pipeline_metadata_.video_rotation); | 734 cc::VideoLayer::Create(compositor_, pipeline_metadata_.video_rotation); |
736 | 735 |
737 if (pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_90 || | 736 if (pipeline_metadata_.video_rotation == VIDEO_ROTATION_90 || |
738 pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_270) { | 737 pipeline_metadata_.video_rotation == VIDEO_ROTATION_270) { |
739 gfx::Size size = pipeline_metadata_.natural_size; | 738 gfx::Size size = pipeline_metadata_.natural_size; |
740 pipeline_metadata_.natural_size = gfx::Size(size.height(), size.width()); | 739 pipeline_metadata_.natural_size = gfx::Size(size.height(), size.width()); |
741 } | 740 } |
742 | 741 |
743 video_weblayer_.reset(new cc_blink::WebLayerImpl(layer)); | 742 video_weblayer_.reset(new cc_blink::WebLayerImpl(layer)); |
744 video_weblayer_->setOpaque(opaque_); | 743 video_weblayer_->setOpaque(opaque_); |
745 client_->setWebLayer(video_weblayer_.get()); | 744 client_->setWebLayer(video_weblayer_.get()); |
746 } | 745 } |
747 } | 746 } |
748 | 747 |
749 void WebMediaPlayerImpl::OnPipelineBufferingStateChanged( | 748 void WebMediaPlayerImpl::OnPipelineBufferingStateChanged( |
750 media::BufferingState buffering_state) { | 749 BufferingState buffering_state) { |
751 DVLOG(1) << __FUNCTION__ << "(" << buffering_state << ")"; | 750 DVLOG(1) << __FUNCTION__ << "(" << buffering_state << ")"; |
752 | 751 |
753 // Ignore buffering state changes until we've completed all outstanding seeks. | 752 // Ignore buffering state changes until we've completed all outstanding seeks. |
754 if (seeking_ || pending_seek_) | 753 if (seeking_ || pending_seek_) |
755 return; | 754 return; |
756 | 755 |
757 // TODO(scherkus): Handle other buffering states when Pipeline starts using | 756 // TODO(scherkus): Handle other buffering states when Pipeline starts using |
758 // them and translate them ready state changes http://crbug.com/144683 | 757 // them and translate them ready state changes http://crbug.com/144683 |
759 DCHECK_EQ(buffering_state, media::BUFFERING_HAVE_ENOUGH); | 758 DCHECK_EQ(buffering_state, BUFFERING_HAVE_ENOUGH); |
760 SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData); | 759 SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData); |
761 | 760 |
762 // Blink expects a timeChanged() in response to a seek(). | 761 // Blink expects a timeChanged() in response to a seek(). |
763 if (should_notify_time_changed_) | 762 if (should_notify_time_changed_) |
764 client_->timeChanged(); | 763 client_->timeChanged(); |
765 } | 764 } |
766 | 765 |
767 void WebMediaPlayerImpl::OnDemuxerOpened() { | 766 void WebMediaPlayerImpl::OnDemuxerOpened() { |
768 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 767 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
769 client_->mediaSourceOpened(new WebMediaSourceImpl( | 768 client_->mediaSourceOpened(new WebMediaSourceImpl( |
770 chunk_demuxer_, base::Bind(&LogMediaSourceError, media_log_))); | 769 chunk_demuxer_, base::Bind(&LogMediaSourceError, media_log_))); |
771 } | 770 } |
772 | 771 |
773 void WebMediaPlayerImpl::OnAddTextTrack( | 772 void WebMediaPlayerImpl::OnAddTextTrack( |
774 const media::TextTrackConfig& config, | 773 const TextTrackConfig& config, |
775 const media::AddTextTrackDoneCB& done_cb) { | 774 const AddTextTrackDoneCB& done_cb) { |
776 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 775 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
777 | 776 |
778 const WebInbandTextTrackImpl::Kind web_kind = | 777 const WebInbandTextTrackImpl::Kind web_kind = |
779 static_cast<WebInbandTextTrackImpl::Kind>(config.kind()); | 778 static_cast<WebInbandTextTrackImpl::Kind>(config.kind()); |
780 const blink::WebString web_label = | 779 const blink::WebString web_label = |
781 blink::WebString::fromUTF8(config.label()); | 780 blink::WebString::fromUTF8(config.label()); |
782 const blink::WebString web_language = | 781 const blink::WebString web_language = |
783 blink::WebString::fromUTF8(config.language()); | 782 blink::WebString::fromUTF8(config.language()); |
784 const blink::WebString web_id = | 783 const blink::WebString web_id = |
785 blink::WebString::fromUTF8(config.id()); | 784 blink::WebString::fromUTF8(config.id()); |
786 | 785 |
787 scoped_ptr<WebInbandTextTrackImpl> web_inband_text_track( | 786 scoped_ptr<WebInbandTextTrackImpl> web_inband_text_track( |
788 new WebInbandTextTrackImpl(web_kind, web_label, web_language, web_id, | 787 new WebInbandTextTrackImpl(web_kind, web_label, web_language, web_id, |
789 text_track_index_++)); | 788 text_track_index_++)); |
790 | 789 |
791 scoped_ptr<media::TextTrack> text_track(new TextTrackImpl( | 790 scoped_ptr<TextTrack> text_track(new TextTrackImpl( |
792 main_task_runner_, client_, web_inband_text_track.Pass())); | 791 main_task_runner_, client_, web_inband_text_track.Pass())); |
793 | 792 |
794 done_cb.Run(text_track.Pass()); | 793 done_cb.Run(text_track.Pass()); |
795 } | 794 } |
796 | 795 |
797 void WebMediaPlayerImpl::DataSourceInitialized(bool success) { | 796 void WebMediaPlayerImpl::DataSourceInitialized(bool success) { |
798 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 797 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
799 | 798 |
800 if (!success) { | 799 if (!success) { |
801 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); | 800 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); |
802 return; | 801 return; |
803 } | 802 } |
804 | 803 |
805 StartPipeline(); | 804 StartPipeline(); |
806 } | 805 } |
807 | 806 |
808 void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) { | 807 void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) { |
809 if (!is_downloading && network_state_ == WebMediaPlayer::NetworkStateLoading) | 808 if (!is_downloading && network_state_ == WebMediaPlayer::NetworkStateLoading) |
810 SetNetworkState(WebMediaPlayer::NetworkStateIdle); | 809 SetNetworkState(WebMediaPlayer::NetworkStateIdle); |
811 else if (is_downloading && network_state_ == WebMediaPlayer::NetworkStateIdle) | 810 else if (is_downloading && network_state_ == WebMediaPlayer::NetworkStateIdle) |
812 SetNetworkState(WebMediaPlayer::NetworkStateLoading); | 811 SetNetworkState(WebMediaPlayer::NetworkStateLoading); |
813 media_log_->AddEvent( | 812 media_log_->AddEvent( |
814 media_log_->CreateBooleanEvent( | 813 media_log_->CreateBooleanEvent( |
815 media::MediaLogEvent::NETWORK_ACTIVITY_SET, | 814 MediaLogEvent::NETWORK_ACTIVITY_SET, |
816 "is_downloading_data", is_downloading)); | 815 "is_downloading_data", is_downloading)); |
817 } | 816 } |
818 | 817 |
819 // TODO(xhwang): Move this to a factory class so that we can create different | 818 // TODO(xhwang): Move this to a factory class so that we can create different |
820 // renderers. | 819 // renderers. |
821 scoped_ptr<media::Renderer> WebMediaPlayerImpl::CreateRenderer() { | 820 scoped_ptr<Renderer> WebMediaPlayerImpl::CreateRenderer() { |
822 media::SetDecryptorReadyCB set_decryptor_ready_cb = | 821 SetDecryptorReadyCB set_decryptor_ready_cb = |
823 encrypted_media_support_->CreateSetDecryptorReadyCB(); | 822 encrypted_media_support_->CreateSetDecryptorReadyCB(); |
824 | 823 |
825 // Create our audio decoders and renderer. | 824 // Create our audio decoders and renderer. |
826 ScopedVector<media::AudioDecoder> audio_decoders; | 825 ScopedVector<AudioDecoder> audio_decoders; |
827 | 826 |
828 media::LogCB log_cb = base::Bind(&LogMediaSourceError, media_log_); | 827 LogCB log_cb = base::Bind(&LogMediaSourceError, media_log_); |
829 audio_decoders.push_back(new media::FFmpegAudioDecoder(media_task_runner_, | 828 audio_decoders.push_back(new FFmpegAudioDecoder(media_task_runner_, log_cb)); |
830 log_cb)); | 829 audio_decoders.push_back(new OpusAudioDecoder(media_task_runner_)); |
831 audio_decoders.push_back(new media::OpusAudioDecoder(media_task_runner_)); | |
832 | 830 |
833 scoped_ptr<media::AudioRenderer> audio_renderer(new media::AudioRendererImpl( | 831 scoped_ptr<AudioRenderer> audio_renderer(new AudioRendererImpl( |
834 media_task_runner_, | 832 media_task_runner_, |
835 audio_source_provider_.get(), | 833 audio_source_provider_.get(), |
836 audio_decoders.Pass(), | 834 audio_decoders.Pass(), |
837 set_decryptor_ready_cb, | 835 set_decryptor_ready_cb, |
838 audio_hardware_config_)); | 836 audio_hardware_config_)); |
839 | 837 |
840 // Create our video decoders and renderer. | 838 // Create our video decoders and renderer. |
841 ScopedVector<media::VideoDecoder> video_decoders; | 839 ScopedVector<VideoDecoder> video_decoders; |
842 | 840 |
843 if (gpu_factories_.get()) { | 841 if (gpu_factories_.get()) { |
844 video_decoders.push_back( | 842 video_decoders.push_back( |
845 new media::GpuVideoDecoder(gpu_factories_, media_log_)); | 843 new GpuVideoDecoder(gpu_factories_, media_log_)); |
846 } | 844 } |
847 | 845 |
848 #if !defined(MEDIA_DISABLE_LIBVPX) | 846 #if !defined(MEDIA_DISABLE_LIBVPX) |
849 video_decoders.push_back(new media::VpxVideoDecoder(media_task_runner_)); | 847 video_decoders.push_back(new VpxVideoDecoder(media_task_runner_)); |
850 #endif // !defined(MEDIA_DISABLE_LIBVPX) | 848 #endif // !defined(MEDIA_DISABLE_LIBVPX) |
851 | 849 |
852 video_decoders.push_back(new media::FFmpegVideoDecoder(media_task_runner_)); | 850 video_decoders.push_back(new FFmpegVideoDecoder(media_task_runner_)); |
853 | 851 |
854 scoped_ptr<media::VideoRenderer> video_renderer( | 852 scoped_ptr<VideoRenderer> video_renderer( |
855 new media::VideoRendererImpl( | 853 new VideoRendererImpl( |
856 media_task_runner_, | 854 media_task_runner_, |
857 video_decoders.Pass(), | 855 video_decoders.Pass(), |
858 set_decryptor_ready_cb, | 856 set_decryptor_ready_cb, |
859 base::Bind(&WebMediaPlayerImpl::FrameReady, base::Unretained(this)), | 857 base::Bind(&WebMediaPlayerImpl::FrameReady, base::Unretained(this)), |
860 true)); | 858 true)); |
861 | 859 |
862 // Create renderer. | 860 // Create renderer. |
863 return scoped_ptr<media::Renderer>(new media::RendererImpl( | 861 return scoped_ptr<Renderer>(new RendererImpl( |
864 media_task_runner_, | 862 media_task_runner_, |
865 demuxer_.get(), | 863 demuxer_.get(), |
866 audio_renderer.Pass(), | 864 audio_renderer.Pass(), |
867 video_renderer.Pass())); | 865 video_renderer.Pass())); |
868 } | 866 } |
869 | 867 |
870 void WebMediaPlayerImpl::StartPipeline() { | 868 void WebMediaPlayerImpl::StartPipeline() { |
871 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 869 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
872 | 870 |
873 // Keep track if this is a MSE or non-MSE playback. | 871 // Keep track if this is a MSE or non-MSE playback. |
874 UMA_HISTOGRAM_BOOLEAN("Media.MSE.Playback", | 872 UMA_HISTOGRAM_BOOLEAN("Media.MSE.Playback", |
875 (load_type_ == LoadTypeMediaSource)); | 873 (load_type_ == LoadTypeMediaSource)); |
876 | 874 |
877 media::LogCB mse_log_cb; | 875 LogCB mse_log_cb; |
878 media::Demuxer::NeedKeyCB need_key_cb = | 876 Demuxer::NeedKeyCB need_key_cb = |
879 encrypted_media_support_->CreateNeedKeyCB(); | 877 encrypted_media_support_->CreateNeedKeyCB(); |
880 | 878 |
881 // Figure out which demuxer to use. | 879 // Figure out which demuxer to use. |
882 if (load_type_ != LoadTypeMediaSource) { | 880 if (load_type_ != LoadTypeMediaSource) { |
883 DCHECK(!chunk_demuxer_); | 881 DCHECK(!chunk_demuxer_); |
884 DCHECK(data_source_); | 882 DCHECK(data_source_); |
885 | 883 |
886 demuxer_.reset(new media::FFmpegDemuxer( | 884 demuxer_.reset(new FFmpegDemuxer( |
887 media_task_runner_, data_source_.get(), | 885 media_task_runner_, data_source_.get(), |
888 need_key_cb, | 886 need_key_cb, |
889 media_log_)); | 887 media_log_)); |
890 } else { | 888 } else { |
891 DCHECK(!chunk_demuxer_); | 889 DCHECK(!chunk_demuxer_); |
892 DCHECK(!data_source_); | 890 DCHECK(!data_source_); |
893 | 891 |
894 mse_log_cb = base::Bind(&LogMediaSourceError, media_log_); | 892 mse_log_cb = base::Bind(&LogMediaSourceError, media_log_); |
895 | 893 |
896 chunk_demuxer_ = new media::ChunkDemuxer( | 894 chunk_demuxer_ = new ChunkDemuxer( |
897 BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDemuxerOpened), | 895 BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDemuxerOpened), |
898 need_key_cb, | 896 need_key_cb, |
899 mse_log_cb, | 897 mse_log_cb, |
900 true); | 898 true); |
901 demuxer_.reset(chunk_demuxer_); | 899 demuxer_.reset(chunk_demuxer_); |
902 } | 900 } |
903 | 901 |
904 // ... and we're ready to go! | 902 // ... and we're ready to go! |
905 seeking_ = true; | 903 seeking_ = true; |
906 pipeline_.Start( | 904 pipeline_.Start( |
(...skipping 32 matching lines...) |
939 | 937 |
940 blink::WebAudioSourceProvider* WebMediaPlayerImpl::audioSourceProvider() { | 938 blink::WebAudioSourceProvider* WebMediaPlayerImpl::audioSourceProvider() { |
941 return audio_source_provider_.get(); | 939 return audio_source_provider_.get(); |
942 } | 940 } |
943 | 941 |
944 double WebMediaPlayerImpl::GetPipelineDuration() const { | 942 double WebMediaPlayerImpl::GetPipelineDuration() const { |
945 base::TimeDelta duration = pipeline_.GetMediaDuration(); | 943 base::TimeDelta duration = pipeline_.GetMediaDuration(); |
946 | 944 |
947 // Return positive infinity if the resource is unbounded. | 945 // Return positive infinity if the resource is unbounded. |
948 // http://www.whatwg.org/specs/web-apps/current-work/multipage/video.html#dom-media-duration | 946 // http://www.whatwg.org/specs/web-apps/current-work/multipage/video.html#dom-media-duration |
949 if (duration == media::kInfiniteDuration()) | 947 if (duration == kInfiniteDuration()) |
950 return std::numeric_limits<double>::infinity(); | 948 return std::numeric_limits<double>::infinity(); |
951 | 949 |
952 return duration.InSecondsF(); | 950 return duration.InSecondsF(); |
953 } | 951 } |
954 | 952 |
955 void WebMediaPlayerImpl::OnDurationChanged() { | 953 void WebMediaPlayerImpl::OnDurationChanged() { |
956 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) | 954 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) |
957 return; | 955 return; |
958 | 956 |
959 client_->durationChanged(); | 957 client_->durationChanged(); |
(...skipping 14 matching lines...) |
974 void WebMediaPlayerImpl::OnOpacityChanged(bool opaque) { | 972 void WebMediaPlayerImpl::OnOpacityChanged(bool opaque) { |
975 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 973 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
976 DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing); | 974 DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing); |
977 | 975 |
978 opaque_ = opaque; | 976 opaque_ = opaque; |
979 if (video_weblayer_) | 977 if (video_weblayer_) |
980 video_weblayer_->setOpaque(opaque_); | 978 video_weblayer_->setOpaque(opaque_); |
981 } | 979 } |
982 | 980 |
983 void WebMediaPlayerImpl::FrameReady( | 981 void WebMediaPlayerImpl::FrameReady( |
984 const scoped_refptr<media::VideoFrame>& frame) { | 982 const scoped_refptr<VideoFrame>& frame) { |
985 compositor_task_runner_->PostTask( | 983 compositor_task_runner_->PostTask( |
986 FROM_HERE, | 984 FROM_HERE, |
987 base::Bind(&VideoFrameCompositor::UpdateCurrentFrame, | 985 base::Bind(&VideoFrameCompositor::UpdateCurrentFrame, |
988 base::Unretained(compositor_), | 986 base::Unretained(compositor_), |
989 frame)); | 987 frame)); |
990 } | 988 } |
991 | 989 |
992 static void GetCurrentFrameAndSignal( | 990 static void GetCurrentFrameAndSignal( |
993 VideoFrameCompositor* compositor, | 991 VideoFrameCompositor* compositor, |
994 scoped_refptr<media::VideoFrame>* video_frame_out, | 992 scoped_refptr<VideoFrame>* video_frame_out, |
995 base::WaitableEvent* event) { | 993 base::WaitableEvent* event) { |
996 TRACE_EVENT0("media", "GetCurrentFrameAndSignal"); | 994 TRACE_EVENT0("media", "GetCurrentFrameAndSignal"); |
997 *video_frame_out = compositor->GetCurrentFrame(); | 995 *video_frame_out = compositor->GetCurrentFrame(); |
998 event->Signal(); | 996 event->Signal(); |
999 } | 997 } |
1000 | 998 |
1001 scoped_refptr<media::VideoFrame> | 999 scoped_refptr<VideoFrame> |
1002 WebMediaPlayerImpl::GetCurrentFrameFromCompositor() { | 1000 WebMediaPlayerImpl::GetCurrentFrameFromCompositor() { |
1003 TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor"); | 1001 TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor"); |
1004 if (compositor_task_runner_->BelongsToCurrentThread()) | 1002 if (compositor_task_runner_->BelongsToCurrentThread()) |
1005 return compositor_->GetCurrentFrame(); | 1003 return compositor_->GetCurrentFrame(); |
1006 | 1004 |
1007 // Use a posted task and waitable event instead of a lock otherwise | 1005 // Use a posted task and waitable event instead of a lock otherwise |
1008 // WebGL/Canvas can see different content than what the compositor is seeing. | 1006 // WebGL/Canvas can see different content than what the compositor is seeing. |
1009 scoped_refptr<media::VideoFrame> video_frame; | 1007 scoped_refptr<VideoFrame> video_frame; |
1010 base::WaitableEvent event(false, false); | 1008 base::WaitableEvent event(false, false); |
1011 compositor_task_runner_->PostTask(FROM_HERE, | 1009 compositor_task_runner_->PostTask(FROM_HERE, |
1012 base::Bind(&GetCurrentFrameAndSignal, | 1010 base::Bind(&GetCurrentFrameAndSignal, |
1013 base::Unretained(compositor_), | 1011 base::Unretained(compositor_), |
1014 &video_frame, | 1012 &video_frame, |
1015 &event)); | 1013 &event)); |
1016 event.Wait(); | 1014 event.Wait(); |
1017 return video_frame; | 1015 return video_frame; |
1018 } | 1016 } |
1019 | 1017 |
1020 } // namespace content | 1018 } // namespace media |