OLD | NEW |
| (Empty) |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/renderer/media/webmediaplayer_impl.h" | |
6 | |
7 #include <algorithm> | |
8 #include <limits> | |
9 #include <string> | |
10 #include <vector> | |
11 | |
12 #include "base/bind.h" | |
13 #include "base/callback.h" | |
14 #include "base/callback_helpers.h" | |
15 #include "base/debug/alias.h" | |
16 #include "base/debug/crash_logging.h" | |
17 #include "base/debug/trace_event.h" | |
18 #include "base/message_loop/message_loop_proxy.h" | |
19 #include "base/metrics/histogram.h" | |
20 #include "base/single_thread_task_runner.h" | |
21 #include "base/synchronization/waitable_event.h" | |
22 #include "cc/blink/web_layer_impl.h" | |
23 #include "cc/layers/video_layer.h" | |
24 #include "content/renderer/media/buffered_data_source.h" | |
25 #include "content/renderer/media/crypto/encrypted_media_player_support.h" | |
26 #include "content/renderer/media/texttrack_impl.h" | |
27 #include "content/renderer/media/webaudiosourceprovider_impl.h" | |
28 #include "content/renderer/media/webinbandtexttrack_impl.h" | |
29 #include "content/renderer/media/webmediaplayer_delegate.h" | |
30 #include "content/renderer/media/webmediaplayer_params.h" | |
31 #include "content/renderer/media/webmediaplayer_util.h" | |
32 #include "content/renderer/media/webmediasource_impl.h" | |
33 #include "gpu/GLES2/gl2extchromium.h" | |
34 #include "gpu/command_buffer/common/mailbox_holder.h" | |
35 #include "media/audio/null_audio_sink.h" | |
36 #include "media/base/audio_hardware_config.h" | |
37 #include "media/base/bind_to_current_loop.h" | |
38 #include "media/base/limits.h" | |
39 #include "media/base/media_log.h" | |
40 #include "media/base/pipeline.h" | |
41 #include "media/base/text_renderer.h" | |
42 #include "media/base/video_frame.h" | |
43 #include "media/filters/audio_renderer_impl.h" | |
44 #include "media/filters/chunk_demuxer.h" | |
45 #include "media/filters/ffmpeg_audio_decoder.h" | |
46 #include "media/filters/ffmpeg_demuxer.h" | |
47 #include "media/filters/ffmpeg_video_decoder.h" | |
48 #include "media/filters/gpu_video_accelerator_factories.h" | |
49 #include "media/filters/gpu_video_decoder.h" | |
50 #include "media/filters/opus_audio_decoder.h" | |
51 #include "media/filters/renderer_impl.h" | |
52 #include "media/filters/video_renderer_impl.h" | |
53 #include "media/filters/vpx_video_decoder.h" | |
54 #include "third_party/WebKit/public/platform/WebMediaSource.h" | |
55 #include "third_party/WebKit/public/platform/WebRect.h" | |
56 #include "third_party/WebKit/public/platform/WebSize.h" | |
57 #include "third_party/WebKit/public/platform/WebString.h" | |
58 #include "third_party/WebKit/public/platform/WebURL.h" | |
59 #include "third_party/WebKit/public/web/WebLocalFrame.h" | |
60 #include "third_party/WebKit/public/web/WebSecurityOrigin.h" | |
61 #include "third_party/WebKit/public/web/WebView.h" | |
62 | |
63 using blink::WebCanvas; | |
64 using blink::WebMediaPlayer; | |
65 using blink::WebRect; | |
66 using blink::WebSize; | |
67 using blink::WebString; | |
68 using media::PipelineStatus; | |
69 | |
namespace {

// Limits the range of playback rate.
//
// TODO(kylep): Revisit these.
//
// Vista has substantially lower performance than XP or Windows7. If you speed
// up a video too much, it can't keep up, and rendering stops updating except on
// the time bar. For really high speeds, audio becomes a bottleneck and we just
// use up the data we have, which may not achieve the speed requested, but will
// not crash the tab.
//
// A very slow speed, ie 0.00000001x, causes the machine to lock up. (It seems
// like a busy loop). It gets unresponsive, although its not completely dead.
//
// Also our timers are not very accurate (especially for ogg), which becomes
// evident at low speeds and on Vista. Since other speeds are risky and outside
// the norms, we think 1/16x to 16x is a safe and useful range for now.
const double kMinRate = 0.0625;
const double kMaxRate = 16.0;

// Adapts a blink::WebGraphicsContext3D to media::VideoFrame's SyncPointClient
// interface so a frame's release sync point can be updated after the texture
// copy in copyVideoTextureToPlatformTexture().
class SyncPointClientImpl : public media::VideoFrame::SyncPointClient {
 public:
  explicit SyncPointClientImpl(
      blink::WebGraphicsContext3D* web_graphics_context)
      : web_graphics_context_(web_graphics_context) {}
  virtual ~SyncPointClientImpl() {}
  // Inserts a sync point into the context's command stream and returns it.
  virtual uint32 InsertSyncPoint() OVERRIDE {
    return web_graphics_context_->insertSyncPoint();
  }
  // Blocks subsequent commands on the given sync point.
  virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE {
    web_graphics_context_->waitSyncPoint(sync_point);
  }

 private:
  // Raw pointer; assumed to outlive this client (instances are stack-scoped
  // in copyVideoTextureToPlatformTexture()).
  blink::WebGraphicsContext3D* web_graphics_context_;
};

}  // namespace
109 | |
110 namespace content { | |
111 | |
112 class BufferedDataSourceHostImpl; | |
113 | |
// Compile-time checks that WebMediaPlayer::CORSMode* values stay numerically
// in sync with BufferedResourceLoader::k* values, since DoLoad() converts
// between them with a static_cast.
#define COMPILE_ASSERT_MATCHING_ENUM(name) \
  COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::CORSMode ## name) == \
                 static_cast<int>(BufferedResourceLoader::k ## name), \
                 mismatching_enums)
COMPILE_ASSERT_MATCHING_ENUM(Unspecified);
COMPILE_ASSERT_MATCHING_ENUM(Anonymous);
COMPILE_ASSERT_MATCHING_ENUM(UseCredentials);
#undef COMPILE_ASSERT_MATCHING_ENUM

// Binds a member function (plus optional bound argument) into a callback that
// always runs on the main render task runner, holding |this| weakly. The
// leading DCHECK ensures the *binding* also happens on the main thread.
#define BIND_TO_RENDER_LOOP(function) \
  (DCHECK(main_task_runner_->BelongsToCurrentThread()), \
   media::BindToCurrentLoop(base::Bind(function, AsWeakPtr())))

#define BIND_TO_RENDER_LOOP1(function, arg1) \
  (DCHECK(main_task_runner_->BelongsToCurrentThread()), \
   media::BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1)))
130 | |
// Records a media-source error string as a structured event on |media_log|.
static void LogMediaSourceError(const scoped_refptr<media::MediaLog>& media_log,
                                const std::string& error) {
  media_log->AddEvent(media_log->CreateMediaSourceErrorEvent(error));
}
135 | |
136 WebMediaPlayerImpl::WebMediaPlayerImpl( | |
137 blink::WebLocalFrame* frame, | |
138 blink::WebMediaPlayerClient* client, | |
139 base::WeakPtr<WebMediaPlayerDelegate> delegate, | |
140 const WebMediaPlayerParams& params) | |
141 : frame_(frame), | |
142 network_state_(WebMediaPlayer::NetworkStateEmpty), | |
143 ready_state_(WebMediaPlayer::ReadyStateHaveNothing), | |
144 preload_(BufferedDataSource::AUTO), | |
145 main_task_runner_(base::MessageLoopProxy::current()), | |
146 media_task_runner_(params.media_task_runner()), | |
147 media_log_(params.media_log()), | |
148 pipeline_(media_task_runner_, media_log_.get()), | |
149 load_type_(LoadTypeURL), | |
150 opaque_(false), | |
151 paused_(true), | |
152 seeking_(false), | |
153 playback_rate_(0.0f), | |
154 ended_(false), | |
155 pending_seek_(false), | |
156 pending_seek_seconds_(0.0f), | |
157 should_notify_time_changed_(false), | |
158 client_(client), | |
159 delegate_(delegate), | |
160 defer_load_cb_(params.defer_load_cb()), | |
161 gpu_factories_(params.gpu_factories()), | |
162 supports_save_(true), | |
163 chunk_demuxer_(NULL), | |
164 compositor_task_runner_(params.compositor_task_runner()), | |
165 compositor_(new VideoFrameCompositor( | |
166 BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnNaturalSizeChanged), | |
167 BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnOpacityChanged))), | |
168 text_track_index_(0), | |
169 encrypted_media_support_( | |
170 params.CreateEncryptedMediaPlayerSupport(client)), | |
171 audio_hardware_config_(params.audio_hardware_config()) { | |
172 DCHECK(encrypted_media_support_); | |
173 | |
174 // Threaded compositing isn't enabled universally yet. | |
175 if (!compositor_task_runner_.get()) | |
176 compositor_task_runner_ = base::MessageLoopProxy::current(); | |
177 | |
178 media_log_->AddEvent( | |
179 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED)); | |
180 | |
181 // |gpu_factories_| requires that its entry points be called on its | |
182 // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the | |
183 // factories, require that their message loops are identical. | |
184 DCHECK(!gpu_factories_.get() || | |
185 (gpu_factories_->GetTaskRunner() == media_task_runner_.get())); | |
186 | |
187 // Use the null sink if no sink was provided. | |
188 audio_source_provider_ = new WebAudioSourceProviderImpl( | |
189 params.audio_renderer_sink().get() | |
190 ? params.audio_renderer_sink() | |
191 : new media::NullAudioSink(media_task_runner_)); | |
192 } | |
193 | |
WebMediaPlayerImpl::~WebMediaPlayerImpl() {
  // Detach the video layer first so Blink stops referencing it during
  // teardown.
  client_->setWebLayer(NULL);

  DCHECK(main_task_runner_->BelongsToCurrentThread());
  media_log_->AddEvent(
      media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED));

  if (delegate_.get())
    delegate_->PlayerGone(this);

  // Abort any pending IO so stopping the pipeline doesn't get blocked.
  if (data_source_)
    data_source_->Abort();
  if (chunk_demuxer_) {
    chunk_demuxer_->Shutdown();
    chunk_demuxer_ = NULL;
  }

  gpu_factories_ = NULL;

  // Make sure to kill the pipeline so there's no more media threads running.
  // Note: stopping the pipeline might block for a long time.
  base::WaitableEvent waiter(false, false);
  pipeline_.Stop(
      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&waiter)));
  waiter.Wait();

  // |compositor_| is used on the compositor task runner, so it must be
  // deleted there, not here.
  compositor_task_runner_->DeleteSoon(FROM_HERE, compositor_);
}
223 | |
224 void WebMediaPlayerImpl::load(LoadType load_type, const blink::WebURL& url, | |
225 CORSMode cors_mode) { | |
226 DVLOG(1) << __FUNCTION__ << "(" << load_type << ", " << url << ", " | |
227 << cors_mode << ")"; | |
228 if (!defer_load_cb_.is_null()) { | |
229 defer_load_cb_.Run(base::Bind( | |
230 &WebMediaPlayerImpl::DoLoad, AsWeakPtr(), load_type, url, cors_mode)); | |
231 return; | |
232 } | |
233 DoLoad(load_type, url, cors_mode); | |
234 } | |
235 | |
// Performs the actual load: media-source playback starts the pipeline
// immediately; URL playback first initializes a BufferedDataSource.
void WebMediaPlayerImpl::DoLoad(LoadType load_type,
                                const blink::WebURL& url,
                                CORSMode cors_mode) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  GURL gurl(url);
  ReportMediaSchemeUma(gurl);

  // Set subresource URL for crash reporting.
  base::debug::SetCrashKeyValue("subresource_url", gurl.spec());

  load_type_ = load_type;

  SetNetworkState(WebMediaPlayer::NetworkStateLoading);
  SetReadyState(WebMediaPlayer::ReadyStateHaveNothing);
  media_log_->AddEvent(media_log_->CreateLoadEvent(url.spec()));

  // Media source pipelines can start immediately.
  if (load_type == LoadTypeMediaSource) {
    // "Save As" is not offered for media-source playback (see supportsSave()).
    supports_save_ = false;
    StartPipeline();
    return;
  }

  // Otherwise it's a regular request which requires resolving the URL first.
  // The static_cast relies on the CORSMode COMPILE_ASSERTs above.
  data_source_.reset(new BufferedDataSource(
      url,
      static_cast<BufferedResourceLoader::CORSMode>(cors_mode),
      main_task_runner_,
      frame_,
      media_log_.get(),
      &buffered_data_source_host_,
      base::Bind(&WebMediaPlayerImpl::NotifyDownloading, AsWeakPtr())));
  // StartPipeline() is deferred to DataSourceInitialized().
  data_source_->Initialize(
      base::Bind(&WebMediaPlayerImpl::DataSourceInitialized, AsWeakPtr()));
  data_source_->SetPreload(preload_);
}
273 | |
// Resumes playback: reapplies the cached playback rate to the pipeline,
// notifies the data source and delegate, and logs the event.
void WebMediaPlayerImpl::play() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  paused_ = false;
  pipeline_.SetPlaybackRate(playback_rate_);
  if (data_source_)
    data_source_->MediaIsPlaying();

  media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY));

  if (delegate_.get())
    delegate_->DidPlay(this);
}
288 | |
// Pauses playback by driving the pipeline rate to zero and caching the
// current media time in |paused_time_| (served by currentTime() while
// paused).
void WebMediaPlayerImpl::pause() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  paused_ = true;
  pipeline_.SetPlaybackRate(0.0f);
  if (data_source_)
    data_source_->MediaIsPaused();
  paused_time_ = pipeline_.GetMediaTime();

  media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE));

  if (delegate_.get())
    delegate_->DidPause(this);
}
304 | |
// Whether "Save As" should be offered; false for media-source playback
// (cleared in DoLoad()).
bool WebMediaPlayerImpl::supportsSave() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return supports_save_;
}
309 | |
// Starts an asynchronous seek to |seconds|. If a seek is already in flight,
// only records the request; OnPipelineSeeked() re-issues it when the current
// seek completes.
void WebMediaPlayerImpl::seek(double seconds) {
  DVLOG(1) << __FUNCTION__ << "(" << seconds << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  ended_ = false;

  // Seeking drops back to HaveMetadata until new data arrives.
  if (ready_state_ > WebMediaPlayer::ReadyStateHaveMetadata)
    SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);

  base::TimeDelta seek_time = ConvertSecondsToTimestamp(seconds);

  if (seeking_) {
    // A seek is already in flight: remember the new target and tell the
    // demuxer to abandon the in-progress one.
    pending_seek_ = true;
    pending_seek_seconds_ = seconds;
    if (chunk_demuxer_)
      chunk_demuxer_->CancelPendingSeek(seek_time);
    return;
  }

  media_log_->AddEvent(media_log_->CreateSeekEvent(seconds));

  // Update our paused time.
  if (paused_)
    paused_time_ = seek_time;

  seeking_ = true;

  if (chunk_demuxer_)
    chunk_demuxer_->StartWaitingForSeek(seek_time);

  // Kick off the asynchronous seek!
  pipeline_.Seek(
      seek_time,
      BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, true));
}
345 | |
346 void WebMediaPlayerImpl::setRate(double rate) { | |
347 DVLOG(1) << __FUNCTION__ << "(" << rate << ")"; | |
348 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
349 | |
350 // TODO(kylep): Remove when support for negatives is added. Also, modify the | |
351 // following checks so rewind uses reasonable values also. | |
352 if (rate < 0.0) | |
353 return; | |
354 | |
355 // Limit rates to reasonable values by clamping. | |
356 if (rate != 0.0) { | |
357 if (rate < kMinRate) | |
358 rate = kMinRate; | |
359 else if (rate > kMaxRate) | |
360 rate = kMaxRate; | |
361 } | |
362 | |
363 playback_rate_ = rate; | |
364 if (!paused_) { | |
365 pipeline_.SetPlaybackRate(rate); | |
366 if (data_source_) | |
367 data_source_->MediaPlaybackRateChanged(rate); | |
368 } | |
369 } | |
370 | |
// Forwards the requested volume straight to the pipeline.
void WebMediaPlayerImpl::setVolume(double volume) {
  DVLOG(1) << __FUNCTION__ << "(" << volume << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  pipeline_.SetVolume(volume);
}
377 | |
// Compile-time checks that WebMediaPlayer::Preload* values stay numerically
// in sync with BufferedDataSource::Preload values, since setPreload()
// converts between them with a static_cast.
#define COMPILE_ASSERT_MATCHING_ENUM(webkit_name, chromium_name) \
  COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::webkit_name) == \
                 static_cast<int>(BufferedDataSource::chromium_name), \
                 mismatching_enums)
COMPILE_ASSERT_MATCHING_ENUM(PreloadNone, NONE);
COMPILE_ASSERT_MATCHING_ENUM(PreloadMetaData, METADATA);
COMPILE_ASSERT_MATCHING_ENUM(PreloadAuto, AUTO);
#undef COMPILE_ASSERT_MATCHING_ENUM
386 | |
// Records the preload hint and forwards it to the data source if one exists.
// The static_cast is safe: the COMPILE_ASSERTs above pin matching values.
void WebMediaPlayerImpl::setPreload(WebMediaPlayer::Preload preload) {
  DVLOG(1) << __FUNCTION__ << "(" << preload << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  preload_ = static_cast<BufferedDataSource::Preload>(preload);
  if (data_source_)
    data_source_->SetPreload(preload_);
}
395 | |
// True once pipeline metadata reports a video stream.
bool WebMediaPlayerImpl::hasVideo() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_metadata_.has_video;
}
401 | |
// True once pipeline metadata reports an audio stream.
bool WebMediaPlayerImpl::hasAudio() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_metadata_.has_audio;
}
407 | |
// Natural (possibly rotation-swapped, see OnPipelineMetadata()) video size.
blink::WebSize WebMediaPlayerImpl::naturalSize() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return blink::WebSize(pipeline_metadata_.natural_size);
}
413 | |
// Reports paused state from the pipeline's actual rate rather than |paused_|:
// a zero playback rate is treated as paused.
bool WebMediaPlayerImpl::paused() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_.GetPlaybackRate() == 0.0f;
}
419 | |
420 bool WebMediaPlayerImpl::seeking() const { | |
421 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
422 | |
423 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) | |
424 return false; | |
425 | |
426 return seeking_; | |
427 } | |
428 | |
429 double WebMediaPlayerImpl::duration() const { | |
430 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
431 | |
432 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) | |
433 return std::numeric_limits<double>::quiet_NaN(); | |
434 | |
435 return GetPipelineDuration(); | |
436 } | |
437 | |
438 double WebMediaPlayerImpl::timelineOffset() const { | |
439 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
440 | |
441 if (pipeline_metadata_.timeline_offset.is_null()) | |
442 return std::numeric_limits<double>::quiet_NaN(); | |
443 | |
444 return pipeline_metadata_.timeline_offset.ToJsTime(); | |
445 } | |
446 | |
// Current playback position in seconds. While paused, serves the cached
// |paused_time_| so the reported clock doesn't drift; after ending, reports
// the duration.
double WebMediaPlayerImpl::currentTime() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing);

  // TODO(scherkus): Replace with an explicit ended signal to HTMLMediaElement,
  // see http://crbug.com/409280
  if (ended_)
    return duration();

  return (paused_ ? paused_time_ : pipeline_.GetMediaTime()).InSecondsF();
}
458 | |
// Current network state, maintained via SetNetworkState().
WebMediaPlayer::NetworkState WebMediaPlayerImpl::networkState() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return network_state_;
}
463 | |
// Current ready state, maintained via SetReadyState().
WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return ready_state_;
}
468 | |
// Buffered time ranges reported to Blink: the pipeline's ranges, augmented
// with the data source's buffered byte ranges converted to time.
blink::WebTimeRanges WebMediaPlayerImpl::buffered() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::Ranges<base::TimeDelta> buffered_time_ranges =
      pipeline_.GetBufferedTimeRanges();

  // Only merge in the data source's ranges when the duration is finite —
  // presumably the byte-to-time mapping requires it (TODO confirm).
  const base::TimeDelta duration = pipeline_.GetMediaDuration();
  if (duration != media::kInfiniteDuration()) {
    buffered_data_source_host_.AddBufferedTimeRanges(
        &buffered_time_ranges, duration);
  }
  return ConvertToWebTimeRanges(buffered_time_ranges);
}
482 | |
483 double WebMediaPlayerImpl::maxTimeSeekable() const { | |
484 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
485 | |
486 // If we haven't even gotten to ReadyStateHaveMetadata yet then just | |
487 // return 0 so that the seekable range is empty. | |
488 if (ready_state_ < WebMediaPlayer::ReadyStateHaveMetadata) | |
489 return 0.0; | |
490 | |
491 // We don't support seeking in streaming media. | |
492 if (data_source_ && data_source_->IsStreaming()) | |
493 return 0.0; | |
494 return duration(); | |
495 } | |
496 | |
497 bool WebMediaPlayerImpl::didLoadingProgress() { | |
498 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
499 bool pipeline_progress = pipeline_.DidLoadingProgress(); | |
500 bool data_progress = buffered_data_source_host_.DidLoadingProgress(); | |
501 return pipeline_progress || data_progress; | |
502 } | |
503 | |
// Convenience overload: paint with the default source-over transfer mode.
void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
                               const blink::WebRect& rect,
                               unsigned char alpha) {
  paint(canvas, rect, alpha, SkXfermode::kSrcOver_Mode);
}
509 | |
// Paints the current video frame (fetched from the compositor) into |rect|
// on |canvas|, honoring the stream's rotation metadata.
void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
                               const blink::WebRect& rect,
                               unsigned char alpha,
                               SkXfermode::Mode mode) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  TRACE_EVENT0("media", "WebMediaPlayerImpl:paint");

  // TODO(scherkus): Clarify paint() API contract to better understand when and
  // why it's being called. For example, today paint() is called when:
  //   - We haven't reached HAVE_CURRENT_DATA and need to paint black
  //   - We're painting to a canvas
  // See http://crbug.com/341225 http://crbug.com/342621 for details.
  scoped_refptr<media::VideoFrame> video_frame =
      GetCurrentFrameFromCompositor();

  gfx::Rect gfx_rect(rect);

  skcanvas_video_renderer_.Paint(video_frame.get(),
                                 canvas,
                                 gfx_rect,
                                 alpha,
                                 mode,
                                 pipeline_metadata_.video_rotation);
}
534 | |
535 bool WebMediaPlayerImpl::hasSingleSecurityOrigin() const { | |
536 if (data_source_) | |
537 return data_source_->HasSingleOrigin(); | |
538 return true; | |
539 } | |
540 | |
541 bool WebMediaPlayerImpl::didPassCORSAccessCheck() const { | |
542 if (data_source_) | |
543 return data_source_->DidPassCORSAccessCheck(); | |
544 return false; | |
545 } | |
546 | |
// Round-trips |timeValue| through the timestamp conversion so Blink sees the
// same quantization the pipeline uses.
double WebMediaPlayerImpl::mediaTimeForTimeValue(double timeValue) const {
  return ConvertSecondsToTimestamp(timeValue).InSecondsF();
}
550 | |
551 unsigned WebMediaPlayerImpl::decodedFrameCount() const { | |
552 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
553 | |
554 media::PipelineStatistics stats = pipeline_.GetStatistics(); | |
555 return stats.video_frames_decoded; | |
556 } | |
557 | |
558 unsigned WebMediaPlayerImpl::droppedFrameCount() const { | |
559 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
560 | |
561 media::PipelineStatistics stats = pipeline_.GetStatistics(); | |
562 return stats.video_frames_dropped; | |
563 } | |
564 | |
565 unsigned WebMediaPlayerImpl::audioDecodedByteCount() const { | |
566 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
567 | |
568 media::PipelineStatistics stats = pipeline_.GetStatistics(); | |
569 return stats.audio_bytes_decoded; | |
570 } | |
571 | |
572 unsigned WebMediaPlayerImpl::videoDecodedByteCount() const { | |
573 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
574 | |
575 media::PipelineStatistics stats = pipeline_.GetStatistics(); | |
576 return stats.video_bytes_decoded; | |
577 } | |
578 | |
// Copies the current native-texture video frame into |texture| on the
// caller's GL context. Returns false when there is no frame, the frame is
// not a native texture, or its texture target isn't GL_TEXTURE_2D. The
// ordering below (wait on the frame's sync point, consume the mailbox, copy,
// restore pixel-store state, then publish a release sync point) is
// deliberate; do not reorder.
bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture(
    blink::WebGraphicsContext3D* web_graphics_context,
    unsigned int texture,
    unsigned int level,
    unsigned int internal_format,
    unsigned int type,
    bool premultiply_alpha,
    bool flip_y) {
  TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture");

  scoped_refptr<media::VideoFrame> video_frame =
      GetCurrentFrameFromCompositor();

  if (!video_frame.get())
    return false;
  if (video_frame->format() != media::VideoFrame::NATIVE_TEXTURE)
    return false;

  const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
  if (mailbox_holder->texture_target != GL_TEXTURE_2D)
    return false;

  // Wait until the frame's producer has finished before consuming its
  // mailbox into a texture on this context.
  web_graphics_context->waitSyncPoint(mailbox_holder->sync_point);
  uint32 source_texture = web_graphics_context->createAndConsumeTextureCHROMIUM(
      GL_TEXTURE_2D, mailbox_holder->mailbox.name);

  // The video is stored in a unmultiplied format, so premultiply
  // if necessary.
  web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
                                    premultiply_alpha);
  // Application itself needs to take care of setting the right flip_y
  // value down to get the expected result.
  // flip_y==true means to reverse the video orientation while
  // flip_y==false means to keep the intrinsic orientation.
  web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
  web_graphics_context->copyTextureCHROMIUM(GL_TEXTURE_2D,
                                            source_texture,
                                            texture,
                                            level,
                                            internal_format,
                                            type);
  // Restore pixel-store state so later uploads on this context are
  // unaffected.
  web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, false);
  web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
                                    false);

  web_graphics_context->deleteTexture(source_texture);
  web_graphics_context->flush();

  // Publish a release sync point so the frame isn't recycled before our copy
  // commands complete.
  SyncPointClientImpl client(web_graphics_context);
  video_frame->UpdateReleaseSyncPoint(&client);
  return true;
}
631 | |
// Delegates prefixed-EME key request generation to
// |encrypted_media_support_|.
WebMediaPlayer::MediaKeyException
WebMediaPlayerImpl::generateKeyRequest(const WebString& key_system,
                                       const unsigned char* init_data,
                                       unsigned init_data_length) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->GenerateKeyRequest(
      frame_, key_system, init_data, init_data_length);
}
641 | |
// Delegates prefixed-EME key addition to |encrypted_media_support_|.
WebMediaPlayer::MediaKeyException WebMediaPlayerImpl::addKey(
    const WebString& key_system,
    const unsigned char* key,
    unsigned key_length,
    const unsigned char* init_data,
    unsigned init_data_length,
    const WebString& session_id) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->AddKey(
      key_system, key, key_length, init_data, init_data_length, session_id);
}
654 | |
// Delegates prefixed-EME key request cancellation to
// |encrypted_media_support_|.
WebMediaPlayer::MediaKeyException WebMediaPlayerImpl::cancelKeyRequest(
    const WebString& key_system,
    const WebString& session_id) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->CancelKeyRequest(key_system, session_id);
}
662 | |
// Delegates CDM attachment (fire-and-forget variant) to
// |encrypted_media_support_|.
void WebMediaPlayerImpl::setContentDecryptionModule(
    blink::WebContentDecryptionModule* cdm) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModule(cdm);
}
669 | |
// Delegates CDM attachment (result-reporting variant) to
// |encrypted_media_support_|.
void WebMediaPlayerImpl::setContentDecryptionModule(
    blink::WebContentDecryptionModule* cdm,
    blink::WebContentDecryptionModuleResult result) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModule(cdm, result);
}
677 | |
// Delegates synchronous CDM attachment to |encrypted_media_support_|.
void WebMediaPlayerImpl::setContentDecryptionModuleSync(
    blink::WebContentDecryptionModule* cdm) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModuleSync(cdm);
}
684 | |
// Pipeline seek-completion callback. If another seek was requested while
// this one was in flight, immediately re-issues it (and does not report the
// intermediate result); otherwise records errors or refreshes the paused
// time, and remembers whether Blink should get a timeChanged() once
// buffering completes.
void WebMediaPlayerImpl::OnPipelineSeeked(bool time_changed,
                                          PipelineStatus status) {
  DVLOG(1) << __FUNCTION__ << "(" << time_changed << ", " << status << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  seeking_ = false;
  if (pending_seek_) {
    pending_seek_ = false;
    seek(pending_seek_seconds_);
    return;
  }

  if (status != media::PIPELINE_OK) {
    OnPipelineError(status);
    return;
  }

  // Update our paused time.
  if (paused_)
    paused_time_ = pipeline_.GetMediaTime();

  should_notify_time_changed_ = time_changed;
}
707 | |
// Pipeline end-of-stream callback: marks playback ended and notifies Blink,
// unless a seek is still outstanding (in which case "ended" is stale).
void WebMediaPlayerImpl::OnPipelineEnded() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // Ignore state changes until we've completed all outstanding seeks.
  if (seeking_ || pending_seek_)
    return;

  ended_ = true;
  client_->timeChanged();
}
719 | |
// Pipeline error callback: maps the pipeline error onto a Blink network
// state, with decrypt errors additionally routed to the EME support object.
void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(error, media::PIPELINE_OK);

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) {
    // Any error that occurs before reaching ReadyStateHaveMetadata should
    // be considered a format error.
    SetNetworkState(WebMediaPlayer::NetworkStateFormatError);
    return;
  }

  SetNetworkState(PipelineErrorToNetworkState(error));

  if (error == media::PIPELINE_ERROR_DECRYPT)
    encrypted_media_support_->OnPipelineDecryptError();
}
736 | |
// Pipeline metadata callback: records the metadata, advances to
// HaveMetadata, and — for video — creates the compositing layer and hands it
// to Blink.
void WebMediaPlayerImpl::OnPipelineMetadata(
    media::PipelineMetadata metadata) {
  DVLOG(1) << __FUNCTION__;

  pipeline_metadata_ = metadata;

  UMA_HISTOGRAM_ENUMERATION("Media.VideoRotation",
                            metadata.video_rotation,
                            media::VIDEO_ROTATION_MAX + 1);
  SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);

  if (hasVideo()) {
    DCHECK(!video_weblayer_);
    scoped_refptr<cc::VideoLayer> layer =
        cc::VideoLayer::Create(compositor_, pipeline_metadata_.video_rotation);

    // 90/270-degree rotations display the frame transposed, so swap the
    // reported natural width and height accordingly.
    if (pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_90 ||
        pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_270) {
      gfx::Size size = pipeline_metadata_.natural_size;
      pipeline_metadata_.natural_size = gfx::Size(size.height(), size.width());
    }

    video_weblayer_.reset(new cc_blink::WebLayerImpl(layer));
    video_weblayer_->setOpaque(opaque_);
    client_->setWebLayer(video_weblayer_.get());
  }
}
764 | |
// Pipeline buffering callback: advances to HaveEnoughData and, if a seek
// just completed with a time change, delivers the deferred timeChanged().
void WebMediaPlayerImpl::OnPipelineBufferingStateChanged(
    media::BufferingState buffering_state) {
  DVLOG(1) << __FUNCTION__ << "(" << buffering_state << ")";

  // Ignore buffering state changes until we've completed all outstanding seeks.
  if (seeking_ || pending_seek_)
    return;

  // TODO(scherkus): Handle other buffering states when Pipeline starts using
  // them and translate them ready state changes http://crbug.com/144683
  DCHECK_EQ(buffering_state, media::BUFFERING_HAVE_ENOUGH);
  SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);

  // Blink expects a timeChanged() in response to a seek().
  if (should_notify_time_changed_)
    client_->timeChanged();
}
782 | |
// Notifies Blink that the media source demuxer is ready, wrapping it so MSE
// errors get logged to |media_log_|.
void WebMediaPlayerImpl::OnDemuxerOpened() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  client_->mediaSourceOpened(new WebMediaSourceImpl(
      chunk_demuxer_, base::Bind(&LogMediaSourceError, media_log_)));
}
788 | |
789 void WebMediaPlayerImpl::OnAddTextTrack( | |
790 const media::TextTrackConfig& config, | |
791 const media::AddTextTrackDoneCB& done_cb) { | |
792 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
793 | |
794 const WebInbandTextTrackImpl::Kind web_kind = | |
795 static_cast<WebInbandTextTrackImpl::Kind>(config.kind()); | |
796 const blink::WebString web_label = | |
797 blink::WebString::fromUTF8(config.label()); | |
798 const blink::WebString web_language = | |
799 blink::WebString::fromUTF8(config.language()); | |
800 const blink::WebString web_id = | |
801 blink::WebString::fromUTF8(config.id()); | |
802 | |
803 scoped_ptr<WebInbandTextTrackImpl> web_inband_text_track( | |
804 new WebInbandTextTrackImpl(web_kind, web_label, web_language, web_id, | |
805 text_track_index_++)); | |
806 | |
807 scoped_ptr<media::TextTrack> text_track(new TextTrackImpl( | |
808 main_task_runner_, client_, web_inband_text_track.Pass())); | |
809 | |
810 done_cb.Run(text_track.Pass()); | |
811 } | |
812 | |
813 void WebMediaPlayerImpl::DataSourceInitialized(bool success) { | |
814 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
815 | |
816 if (!success) { | |
817 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); | |
818 return; | |
819 } | |
820 | |
821 StartPipeline(); | |
822 } | |
823 | |
824 void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) { | |
825 if (!is_downloading && network_state_ == WebMediaPlayer::NetworkStateLoading) | |
826 SetNetworkState(WebMediaPlayer::NetworkStateIdle); | |
827 else if (is_downloading && network_state_ == WebMediaPlayer::NetworkStateIdle) | |
828 SetNetworkState(WebMediaPlayer::NetworkStateLoading); | |
829 media_log_->AddEvent( | |
830 media_log_->CreateBooleanEvent( | |
831 media::MediaLogEvent::NETWORK_ACTIVITY_SET, | |
832 "is_downloading_data", is_downloading)); | |
833 } | |
834 | |
835 // TODO(xhwang): Move this to a factory class so that we can create different | |
836 // renderers. | |
837 scoped_ptr<media::Renderer> WebMediaPlayerImpl::CreateRenderer() { | |
838 media::SetDecryptorReadyCB set_decryptor_ready_cb = | |
839 encrypted_media_support_->CreateSetDecryptorReadyCB(); | |
840 | |
841 // Create our audio decoders and renderer. | |
842 ScopedVector<media::AudioDecoder> audio_decoders; | |
843 | |
844 media::LogCB log_cb = base::Bind(&LogMediaSourceError, media_log_); | |
845 audio_decoders.push_back(new media::FFmpegAudioDecoder(media_task_runner_, | |
846 log_cb)); | |
847 audio_decoders.push_back(new media::OpusAudioDecoder(media_task_runner_)); | |
848 | |
849 scoped_ptr<media::AudioRenderer> audio_renderer(new media::AudioRendererImpl( | |
850 media_task_runner_, | |
851 audio_source_provider_.get(), | |
852 audio_decoders.Pass(), | |
853 set_decryptor_ready_cb, | |
854 audio_hardware_config_)); | |
855 | |
856 // Create our video decoders and renderer. | |
857 ScopedVector<media::VideoDecoder> video_decoders; | |
858 | |
859 if (gpu_factories_.get()) { | |
860 video_decoders.push_back( | |
861 new media::GpuVideoDecoder(gpu_factories_, media_log_)); | |
862 } | |
863 | |
864 #if !defined(MEDIA_DISABLE_LIBVPX) | |
865 video_decoders.push_back(new media::VpxVideoDecoder(media_task_runner_)); | |
866 #endif // !defined(MEDIA_DISABLE_LIBVPX) | |
867 | |
868 video_decoders.push_back(new media::FFmpegVideoDecoder(media_task_runner_)); | |
869 | |
870 scoped_ptr<media::VideoRenderer> video_renderer( | |
871 new media::VideoRendererImpl( | |
872 media_task_runner_, | |
873 video_decoders.Pass(), | |
874 set_decryptor_ready_cb, | |
875 base::Bind(&WebMediaPlayerImpl::FrameReady, base::Unretained(this)), | |
876 true)); | |
877 | |
878 // Create renderer. | |
879 return scoped_ptr<media::Renderer>(new media::RendererImpl( | |
880 media_task_runner_, | |
881 demuxer_.get(), | |
882 audio_renderer.Pass(), | |
883 video_renderer.Pass())); | |
884 } | |
885 | |
// Selects the demuxer for the current load type (FFmpegDemuxer for regular
// URLs, ChunkDemuxer for MediaSource) and starts the media pipeline, wiring
// all pipeline events back to the render (main) thread.
void WebMediaPlayerImpl::StartPipeline() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // Keep track if this is a MSE or non-MSE playback.
  UMA_HISTOGRAM_BOOLEAN("Media.MSE.Playback",
                        (load_type_ == LoadTypeMediaSource));

  media::LogCB mse_log_cb;
  media::Demuxer::NeedKeyCB need_key_cb =
      encrypted_media_support_->CreateNeedKeyCB();

  // Figure out which demuxer to use.
  if (load_type_ != LoadTypeMediaSource) {
    // Regular src= playback: demux the BufferedDataSource with FFmpeg.
    DCHECK(!chunk_demuxer_);
    DCHECK(data_source_);

    demuxer_.reset(new media::FFmpegDemuxer(
        media_task_runner_, data_source_.get(),
        need_key_cb,
        media_log_));
  } else {
    // MediaSource playback: script supplies the data, so no data source.
    DCHECK(!chunk_demuxer_);
    DCHECK(!data_source_);

    mse_log_cb = base::Bind(&LogMediaSourceError, media_log_);

    // |chunk_demuxer_| is kept as a raw alias of |demuxer_| (see
    // OnDemuxerOpened); ownership lives in |demuxer_|.
    chunk_demuxer_ = new media::ChunkDemuxer(
        BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDemuxerOpened),
        need_key_cb,
        mse_log_cb,
        true);
    demuxer_.reset(chunk_demuxer_);
  }

  // ... and we're ready to go!
  // Startup is treated as an in-progress seek; cleared in OnPipelineSeeked.
  // NOTE(review): seek-completion handling is outside this chunk — confirm.
  seeking_ = true;
  pipeline_.Start(
      demuxer_.get(),
      CreateRenderer(),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineEnded),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineError),
      BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, false),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineMetadata),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineBufferingStateChanged),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDurationChanged),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnAddTextTrack));
}
933 | |
// Records the new network state and unconditionally notifies Blink, even if
// the value did not change.
void WebMediaPlayerImpl::SetNetworkState(WebMediaPlayer::NetworkState state) {
  DVLOG(1) << __FUNCTION__ << "(" << state << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  network_state_ = state;
  // Always notify to ensure client has the latest value.
  client_->networkStateChanged();
}
941 | |
942 void WebMediaPlayerImpl::SetReadyState(WebMediaPlayer::ReadyState state) { | |
943 DVLOG(1) << __FUNCTION__ << "(" << state << ")"; | |
944 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
945 | |
946 if (state == WebMediaPlayer::ReadyStateHaveEnoughData && data_source_ && | |
947 data_source_->assume_fully_buffered() && | |
948 network_state_ == WebMediaPlayer::NetworkStateLoading) | |
949 SetNetworkState(WebMediaPlayer::NetworkStateLoaded); | |
950 | |
951 ready_state_ = state; | |
952 // Always notify to ensure client has the latest value. | |
953 client_->readyStateChanged(); | |
954 } | |
955 | |
// Accessor for the player's WebAudioSourceProvider. The returned pointer is
// non-owning; it is backed by |audio_source_provider_|, which this object
// keeps alive.
blink::WebAudioSourceProvider* WebMediaPlayerImpl::audioSourceProvider() {
  return audio_source_provider_.get();
}
959 | |
960 double WebMediaPlayerImpl::GetPipelineDuration() const { | |
961 base::TimeDelta duration = pipeline_.GetMediaDuration(); | |
962 | |
963 // Return positive infinity if the resource is unbounded. | |
964 // http://www.whatwg.org/specs/web-apps/current-work/multipage/video.html#dom-
media-duration | |
965 if (duration == media::kInfiniteDuration()) | |
966 return std::numeric_limits<double>::infinity(); | |
967 | |
968 return duration.InSecondsF(); | |
969 } | |
970 | |
971 void WebMediaPlayerImpl::OnDurationChanged() { | |
972 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) | |
973 return; | |
974 | |
975 client_->durationChanged(); | |
976 } | |
977 | |
978 void WebMediaPlayerImpl::OnNaturalSizeChanged(gfx::Size size) { | |
979 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
980 DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing); | |
981 TRACE_EVENT0("media", "WebMediaPlayerImpl::OnNaturalSizeChanged"); | |
982 | |
983 media_log_->AddEvent( | |
984 media_log_->CreateVideoSizeSetEvent(size.width(), size.height())); | |
985 pipeline_metadata_.natural_size = size; | |
986 | |
987 client_->sizeChanged(); | |
988 } | |
989 | |
990 void WebMediaPlayerImpl::OnOpacityChanged(bool opaque) { | |
991 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
992 DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing); | |
993 | |
994 opaque_ = opaque; | |
995 if (video_weblayer_) | |
996 video_weblayer_->setOpaque(opaque_); | |
997 } | |
998 | |
999 void WebMediaPlayerImpl::FrameReady( | |
1000 const scoped_refptr<media::VideoFrame>& frame) { | |
1001 compositor_task_runner_->PostTask( | |
1002 FROM_HERE, | |
1003 base::Bind(&VideoFrameCompositor::UpdateCurrentFrame, | |
1004 base::Unretained(compositor_), | |
1005 frame)); | |
1006 } | |
1007 | |
1008 static void GetCurrentFrameAndSignal( | |
1009 VideoFrameCompositor* compositor, | |
1010 scoped_refptr<media::VideoFrame>* video_frame_out, | |
1011 base::WaitableEvent* event) { | |
1012 TRACE_EVENT0("media", "GetCurrentFrameAndSignal"); | |
1013 *video_frame_out = compositor->GetCurrentFrame(); | |
1014 event->Signal(); | |
1015 } | |
1016 | |
1017 scoped_refptr<media::VideoFrame> | |
1018 WebMediaPlayerImpl::GetCurrentFrameFromCompositor() { | |
1019 TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor"); | |
1020 if (compositor_task_runner_->BelongsToCurrentThread()) | |
1021 return compositor_->GetCurrentFrame(); | |
1022 | |
1023 // Use a posted task and waitable event instead of a lock otherwise | |
1024 // WebGL/Canvas can see different content than what the compositor is seeing. | |
1025 scoped_refptr<media::VideoFrame> video_frame; | |
1026 base::WaitableEvent event(false, false); | |
1027 compositor_task_runner_->PostTask(FROM_HERE, | |
1028 base::Bind(&GetCurrentFrameAndSignal, | |
1029 base::Unretained(compositor_), | |
1030 &video_frame, | |
1031 &event)); | |
1032 event.Wait(); | |
1033 return video_frame; | |
1034 } | |
1035 | |
1036 } // namespace content | |
OLD | NEW |