// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webmediaplayer_impl.h"

#include <algorithm>
#include <limits>
#include <string>
#include <vector>

#include "base/bind.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/debug/alias.h"
#include "base/debug/crash_logging.h"
#include "base/debug/trace_event.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/single_thread_task_runner.h"
#include "base/synchronization/waitable_event.h"
#include "cc/blink/web_layer_impl.h"
#include "cc/layers/video_layer.h"
#include "content/renderer/media/buffered_data_source.h"
#include "content/renderer/media/crypto/encrypted_media_player_support.h"
#include "content/renderer/media/texttrack_impl.h"
#include "content/renderer/media/webaudiosourceprovider_impl.h"
#include "content/renderer/media/webinbandtexttrack_impl.h"
#include "content/renderer/media/webmediaplayer_delegate.h"
#include "content/renderer/media/webmediaplayer_params.h"
#include "content/renderer/media/webmediaplayer_util.h"
#include "content/renderer/media/webmediasource_impl.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/audio/null_audio_sink.h"
#include "media/base/audio_hardware_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/limits.h"
#include "media/base/media_log.h"
#include "media/base/pipeline.h"
#include "media/base/text_renderer.h"
#include "media/base/video_frame.h"
#include "media/filters/audio_renderer_impl.h"
#include "media/filters/chunk_demuxer.h"
#include "media/filters/ffmpeg_audio_decoder.h"
#include "media/filters/ffmpeg_demuxer.h"
#include "media/filters/ffmpeg_video_decoder.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "media/filters/gpu_video_decoder.h"
#include "media/filters/opus_audio_decoder.h"
#include "media/filters/renderer_impl.h"
#include "media/filters/video_renderer_impl.h"
#include "media/filters/vpx_video_decoder.h"
#include "third_party/WebKit/public/platform/WebMediaSource.h"
#include "third_party/WebKit/public/platform/WebRect.h"
#include "third_party/WebKit/public/platform/WebSize.h"
#include "third_party/WebKit/public/platform/WebString.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"
#include "third_party/WebKit/public/web/WebSecurityOrigin.h"
#include "third_party/WebKit/public/web/WebView.h"

using blink::WebCanvas;
using blink::WebMediaPlayer;
using blink::WebRect;
using blink::WebSize;
using blink::WebString;
using media::PipelineStatus;

namespace {

// Limits the range of playback rate.
//
// TODO(kylep): Revisit these.
//
// Vista has substantially lower performance than XP or Windows 7. If you speed
// up a video too much, it can't keep up, and rendering stops updating except
// on the time bar. For really high speeds, audio becomes a bottleneck and we
// just use up the data we have, which may not achieve the speed requested, but
// will not crash the tab.
//
// A very slow speed, i.e. 0.00000001x, causes the machine to lock up (it seems
// like a busy loop). It becomes unresponsive, although it's not completely
// dead.
//
// Also, our timers are not very accurate (especially for Ogg), which becomes
// evident at low speeds and on Vista. Since other speeds are risky and outside
// the norms, we think 1/16x to 16x is a safe and useful range for now.
const double kMinRate = 0.0625;
const double kMaxRate = 16.0;

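// Adapts a blink::WebGraphicsContext3D to the
// media::VideoFrame::SyncPointClient interface so that a release sync point
// can be inserted into (and waited on via) the caller's GL context when video
// textures are copied out of a frame.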
class SyncPointClientImpl : public media::VideoFrame::SyncPointClient {
 public:
  explicit SyncPointClientImpl(
      blink::WebGraphicsContext3D* web_graphics_context)
      : web_graphics_context_(web_graphics_context) {}
  virtual ~SyncPointClientImpl() {}
  virtual uint32 InsertSyncPoint() OVERRIDE {
    return web_graphics_context_->insertSyncPoint();
  }
  virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE {
    web_graphics_context_->waitSyncPoint(sync_point);
  }

 private:
  blink::WebGraphicsContext3D* web_graphics_context_;
};

}  // namespace

namespace content {

class BufferedDataSourceHostImpl;

#define COMPILE_ASSERT_MATCHING_ENUM(name) \
  COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::CORSMode ## name) == \
                 static_cast<int>(BufferedResourceLoader::k ## name), \
                 mismatching_enums)
COMPILE_ASSERT_MATCHING_ENUM(Unspecified);
COMPILE_ASSERT_MATCHING_ENUM(Anonymous);
COMPILE_ASSERT_MATCHING_ENUM(UseCredentials);
#undef COMPILE_ASSERT_MATCHING_ENUM

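// These macros bind a WebMediaPlayerImpl member function to a weak pointer
// and wrap the callback with media::BindToCurrentLoop() so it always runs on
// the render (main) thread, regardless of which thread invokes it. The DCHECK
// ensures the macros themselves are only used from the render thread.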
#define BIND_TO_RENDER_LOOP(function) \
  (DCHECK(main_task_runner_->BelongsToCurrentThread()), \
   media::BindToCurrentLoop(base::Bind(function, AsWeakPtr())))

#define BIND_TO_RENDER_LOOP1(function, arg1) \
  (DCHECK(main_task_runner_->BelongsToCurrentThread()), \
   media::BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1)))

static void LogMediaSourceError(const scoped_refptr<media::MediaLog>& media_log,
                                const std::string& error) {
  media_log->AddEvent(media_log->CreateMediaSourceErrorEvent(error));
}

WebMediaPlayerImpl::WebMediaPlayerImpl(
    blink::WebLocalFrame* frame,
    blink::WebMediaPlayerClient* client,
    base::WeakPtr<WebMediaPlayerDelegate> delegate,
    const WebMediaPlayerParams& params)
    : frame_(frame),
      network_state_(WebMediaPlayer::NetworkStateEmpty),
      ready_state_(WebMediaPlayer::ReadyStateHaveNothing),
      preload_(BufferedDataSource::AUTO),
      main_task_runner_(base::MessageLoopProxy::current()),
      media_task_runner_(params.media_task_runner()),
      media_log_(params.media_log()),
      pipeline_(media_task_runner_, media_log_.get()),
      load_type_(LoadTypeURL),
      opaque_(false),
      paused_(true),
      seeking_(false),
      playback_rate_(0.0f),
      pending_seek_(false),
      pending_seek_seconds_(0.0f),
      should_notify_time_changed_(false),
      client_(client),
      delegate_(delegate),
      defer_load_cb_(params.defer_load_cb()),
      gpu_factories_(params.gpu_factories()),
      supports_save_(true),
      chunk_demuxer_(NULL),
      compositor_task_runner_(params.compositor_task_runner()),
      compositor_(new VideoFrameCompositor(
          BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnNaturalSizeChanged),
          BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnOpacityChanged))),
      text_track_index_(0),
      encrypted_media_support_(
          params.CreateEncryptedMediaPlayerSupport(client)),
      audio_hardware_config_(params.audio_hardware_config()) {
  DCHECK(encrypted_media_support_);

  // Threaded compositing isn't enabled universally yet.
  if (!compositor_task_runner_)
    compositor_task_runner_ = base::MessageLoopProxy::current();

  media_log_->AddEvent(
      media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED));

  // |gpu_factories_| requires that its entry points be called on its
  // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the
  // factories, require that their message loops are identical.
  DCHECK(!gpu_factories_.get() ||
         (gpu_factories_->GetTaskRunner() == media_task_runner_.get()));

  // Use the null sink if no sink was provided.
  audio_source_provider_ = new WebAudioSourceProviderImpl(
      params.audio_renderer_sink().get()
          ? params.audio_renderer_sink()
          : new media::NullAudioSink(media_task_runner_));
}

WebMediaPlayerImpl::~WebMediaPlayerImpl() {
  client_->setWebLayer(NULL);

  DCHECK(main_task_runner_->BelongsToCurrentThread());
  media_log_->AddEvent(
      media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED));

  if (delegate_.get())
    delegate_->PlayerGone(this);

  // Abort any pending IO so stopping the pipeline doesn't get blocked.
  if (data_source_)
    data_source_->Abort();
  if (chunk_demuxer_) {
    chunk_demuxer_->Shutdown();
    chunk_demuxer_ = NULL;
  }

  gpu_factories_ = NULL;

  // Make sure to kill the pipeline so there are no more media threads running.
  // Note: stopping the pipeline might block for a long time.
  base::WaitableEvent waiter(false, false);
  pipeline_.Stop(
      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&waiter)));
  waiter.Wait();

  compositor_task_runner_->DeleteSoon(FROM_HERE, compositor_);
}

void WebMediaPlayerImpl::load(LoadType load_type, const blink::WebURL& url,
                              CORSMode cors_mode) {
  DVLOG(1) << __FUNCTION__ << "(" << load_type << ", " << url << ", "
           << cors_mode << ")";
  if (!defer_load_cb_.is_null()) {
    defer_load_cb_.Run(base::Bind(
        &WebMediaPlayerImpl::DoLoad, AsWeakPtr(), load_type, url, cors_mode));
    return;
  }
  DoLoad(load_type, url, cors_mode);
}

void WebMediaPlayerImpl::DoLoad(LoadType load_type,
                                const blink::WebURL& url,
                                CORSMode cors_mode) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  GURL gurl(url);
  ReportMediaSchemeUma(gurl);

  // Set subresource URL for crash reporting.
  base::debug::SetCrashKeyValue("subresource_url", gurl.spec());

  load_type_ = load_type;

  SetNetworkState(WebMediaPlayer::NetworkStateLoading);
  SetReadyState(WebMediaPlayer::ReadyStateHaveNothing);
  media_log_->AddEvent(media_log_->CreateLoadEvent(url.spec()));

  // Media source pipelines can start immediately.
  if (load_type == LoadTypeMediaSource) {
    supports_save_ = false;
    StartPipeline();
    return;
  }

  // Otherwise it's a regular request which requires resolving the URL first.
  data_source_.reset(new BufferedDataSource(
      url,
      static_cast<BufferedResourceLoader::CORSMode>(cors_mode),
      main_task_runner_,
      frame_,
      media_log_.get(),
      &buffered_data_source_host_,
      base::Bind(&WebMediaPlayerImpl::NotifyDownloading, AsWeakPtr())));
  data_source_->Initialize(
      base::Bind(&WebMediaPlayerImpl::DataSourceInitialized, AsWeakPtr()));
  data_source_->SetPreload(preload_);
}

void WebMediaPlayerImpl::play() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  paused_ = false;
  pipeline_.SetPlaybackRate(playback_rate_);
  if (data_source_)
    data_source_->MediaIsPlaying();

  media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY));

  if (delegate_.get())
    delegate_->DidPlay(this);
}

void WebMediaPlayerImpl::pause() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  paused_ = true;
  pipeline_.SetPlaybackRate(0.0f);
  if (data_source_)
    data_source_->MediaIsPaused();
  paused_time_ = pipeline_.GetMediaTime();

  media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE));

  if (delegate_.get())
    delegate_->DidPause(this);
}

bool WebMediaPlayerImpl::supportsSave() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return supports_save_;
}

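// Seeks to |seconds|. If a seek is already in progress, the request is
// remembered and replayed from OnPipelineSeeked() once the current seek
// completes.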
void WebMediaPlayerImpl::seek(double seconds) {
  DVLOG(1) << __FUNCTION__ << "(" << seconds << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (ready_state_ > WebMediaPlayer::ReadyStateHaveMetadata)
    SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);

  base::TimeDelta seek_time = ConvertSecondsToTimestamp(seconds);

  if (seeking_) {
    pending_seek_ = true;
    pending_seek_seconds_ = seconds;
    if (chunk_demuxer_)
      chunk_demuxer_->CancelPendingSeek(seek_time);
    return;
  }

  media_log_->AddEvent(media_log_->CreateSeekEvent(seconds));

  // Update our paused time.
  if (paused_)
    paused_time_ = seek_time;

  seeking_ = true;

  if (chunk_demuxer_)
    chunk_demuxer_->StartWaitingForSeek(seek_time);

  // Kick off the asynchronous seek!
  pipeline_.Seek(
      seek_time,
      BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, true));
}

void WebMediaPlayerImpl::setRate(double rate) {
  DVLOG(1) << __FUNCTION__ << "(" << rate << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // TODO(kylep): Remove when support for negatives is added. Also modify the
  // checks below so that rewind uses reasonable values too.
  if (rate < 0.0)
    return;

  // Limit rates to reasonable values by clamping.
  if (rate != 0.0) {
    if (rate < kMinRate)
      rate = kMinRate;
    else if (rate > kMaxRate)
      rate = kMaxRate;
  }

  playback_rate_ = rate;
  if (!paused_) {
    pipeline_.SetPlaybackRate(rate);
    if (data_source_)
      data_source_->MediaPlaybackRateChanged(rate);
  }
}

void WebMediaPlayerImpl::setVolume(double volume) {
  DVLOG(1) << __FUNCTION__ << "(" << volume << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  pipeline_.SetVolume(volume);
}

#define COMPILE_ASSERT_MATCHING_ENUM(webkit_name, chromium_name) \
  COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::webkit_name) == \
                 static_cast<int>(BufferedDataSource::chromium_name), \
                 mismatching_enums)
COMPILE_ASSERT_MATCHING_ENUM(PreloadNone, NONE);
COMPILE_ASSERT_MATCHING_ENUM(PreloadMetaData, METADATA);
COMPILE_ASSERT_MATCHING_ENUM(PreloadAuto, AUTO);
#undef COMPILE_ASSERT_MATCHING_ENUM

void WebMediaPlayerImpl::setPreload(WebMediaPlayer::Preload preload) {
  DVLOG(1) << __FUNCTION__ << "(" << preload << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  preload_ = static_cast<BufferedDataSource::Preload>(preload);
  if (data_source_)
    data_source_->SetPreload(preload_);
}

bool WebMediaPlayerImpl::hasVideo() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_metadata_.has_video;
}

bool WebMediaPlayerImpl::hasAudio() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_metadata_.has_audio;
}

blink::WebSize WebMediaPlayerImpl::naturalSize() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return blink::WebSize(pipeline_metadata_.natural_size);
}

bool WebMediaPlayerImpl::paused() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_.GetPlaybackRate() == 0.0f;
}

bool WebMediaPlayerImpl::seeking() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return false;

  return seeking_;
}

double WebMediaPlayerImpl::duration() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return std::numeric_limits<double>::quiet_NaN();

  return GetPipelineDuration();
}

double WebMediaPlayerImpl::timelineOffset() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (pipeline_metadata_.timeline_offset.is_null())
    return std::numeric_limits<double>::quiet_NaN();

  return pipeline_metadata_.timeline_offset.ToJsTime();
}

double WebMediaPlayerImpl::currentTime() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return (paused_ ? paused_time_ : pipeline_.GetMediaTime()).InSecondsF();
}

WebMediaPlayer::NetworkState WebMediaPlayerImpl::networkState() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return network_state_;
}

WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return ready_state_;
}

blink::WebTimeRanges WebMediaPlayerImpl::buffered() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::Ranges<base::TimeDelta> buffered_time_ranges =
      pipeline_.GetBufferedTimeRanges();

  const base::TimeDelta duration = pipeline_.GetMediaDuration();
  if (duration != media::kInfiniteDuration()) {
    buffered_data_source_host_.AddBufferedTimeRanges(
        &buffered_time_ranges, duration);
  }
  return ConvertToWebTimeRanges(buffered_time_ranges);
}

double WebMediaPlayerImpl::maxTimeSeekable() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // If we haven't even gotten to ReadyStateHaveMetadata yet then just
  // return 0 so that the seekable range is empty.
  if (ready_state_ < WebMediaPlayer::ReadyStateHaveMetadata)
    return 0.0;

  // We don't support seeking in streaming media.
  if (data_source_ && data_source_->IsStreaming())
    return 0.0;
  return duration();
}

bool WebMediaPlayerImpl::didLoadingProgress() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  bool pipeline_progress = pipeline_.DidLoadingProgress();
  bool data_progress = buffered_data_source_host_.DidLoadingProgress();
  return pipeline_progress || data_progress;
}

void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
                               const blink::WebRect& rect,
                               unsigned char alpha) {
  paint(canvas, rect, alpha, SkXfermode::kSrcOver_Mode);
}

void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
                               const blink::WebRect& rect,
                               unsigned char alpha,
                               SkXfermode::Mode mode) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  TRACE_EVENT0("media", "WebMediaPlayerImpl:paint");

  // TODO(scherkus): Clarify paint() API contract to better understand when and
  // why it's being called. For example, today paint() is called when:
  //   - We haven't reached HAVE_CURRENT_DATA and need to paint black
  //   - We're painting to a canvas
  // See http://crbug.com/341225 http://crbug.com/342621 for details.
  scoped_refptr<media::VideoFrame> video_frame =
      GetCurrentFrameFromCompositor();

  gfx::Rect gfx_rect(rect);

  skcanvas_video_renderer_.Paint(video_frame.get(),
                                 canvas,
                                 gfx_rect,
                                 alpha,
                                 mode,
                                 pipeline_metadata_.video_rotation);
}

bool WebMediaPlayerImpl::hasSingleSecurityOrigin() const {
  if (data_source_)
    return data_source_->HasSingleOrigin();
  return true;
}

bool WebMediaPlayerImpl::didPassCORSAccessCheck() const {
  if (data_source_)
    return data_source_->DidPassCORSAccessCheck();
  return false;
}

double WebMediaPlayerImpl::mediaTimeForTimeValue(double timeValue) const {
  return ConvertSecondsToTimestamp(timeValue).InSecondsF();
}

unsigned WebMediaPlayerImpl::decodedFrameCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_frames_decoded;
}

unsigned WebMediaPlayerImpl::droppedFrameCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_frames_dropped;
}

unsigned WebMediaPlayerImpl::audioDecodedByteCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.audio_bytes_decoded;
}

unsigned WebMediaPlayerImpl::videoDecodedByteCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_bytes_decoded;
}

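// Copies the current video frame, which must be backed by a GL_TEXTURE_2D
// mailbox, into the caller-supplied texture using the CHROMIUM copy-texture
// extension, then updates the frame's release sync point.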
bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture(
    blink::WebGraphicsContext3D* web_graphics_context,
    unsigned int texture,
    unsigned int level,
    unsigned int internal_format,
    unsigned int type,
    bool premultiply_alpha,
    bool flip_y) {
  TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture");

  scoped_refptr<media::VideoFrame> video_frame =
      GetCurrentFrameFromCompositor();

  if (!video_frame.get())
    return false;
  if (video_frame->format() != media::VideoFrame::NATIVE_TEXTURE)
    return false;

  const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
  if (mailbox_holder->texture_target != GL_TEXTURE_2D)
    return false;

  web_graphics_context->waitSyncPoint(mailbox_holder->sync_point);
  uint32 source_texture = web_graphics_context->createAndConsumeTextureCHROMIUM(
      GL_TEXTURE_2D, mailbox_holder->mailbox.name);

  // The video is stored in an unmultiplied format, so premultiply if
  // necessary.
  web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
                                    premultiply_alpha);
  // The application must pass the correct flip_y value to get the expected
  // result: flip_y == true reverses the video orientation, while
  // flip_y == false keeps the intrinsic orientation.
  web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
  web_graphics_context->copyTextureCHROMIUM(GL_TEXTURE_2D,
                                            source_texture,
                                            texture,
                                            level,
                                            internal_format,
                                            type);
  web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, false);
  web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
                                    false);

  web_graphics_context->deleteTexture(source_texture);
  web_graphics_context->flush();

  SyncPointClientImpl client(web_graphics_context);
  video_frame->UpdateReleaseSyncPoint(&client);
  return true;
}

WebMediaPlayer::MediaKeyException
WebMediaPlayerImpl::generateKeyRequest(const WebString& key_system,
                                       const unsigned char* init_data,
                                       unsigned init_data_length) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->GenerateKeyRequest(
      frame_, key_system, init_data, init_data_length);
}

WebMediaPlayer::MediaKeyException WebMediaPlayerImpl::addKey(
    const WebString& key_system,
    const unsigned char* key,
    unsigned key_length,
    const unsigned char* init_data,
    unsigned init_data_length,
    const WebString& session_id) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->AddKey(
      key_system, key, key_length, init_data, init_data_length, session_id);
}

WebMediaPlayer::MediaKeyException WebMediaPlayerImpl::cancelKeyRequest(
    const WebString& key_system,
    const WebString& session_id) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->CancelKeyRequest(key_system, session_id);
}

void WebMediaPlayerImpl::setContentDecryptionModule(
    blink::WebContentDecryptionModule* cdm) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModule(cdm);
}

void WebMediaPlayerImpl::setContentDecryptionModule(
    blink::WebContentDecryptionModule* cdm,
    blink::WebContentDecryptionModuleResult result) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModule(cdm, result);
}

void WebMediaPlayerImpl::setContentDecryptionModuleSync(
    blink::WebContentDecryptionModule* cdm) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModuleSync(cdm);
}

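// Completion callback for pipeline seeks. Replays any seek queued while this
// one was in flight; otherwise updates the paused time and records whether
// Blink should be notified that the time changed once buffering completes.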
void WebMediaPlayerImpl::OnPipelineSeeked(bool time_changed,
                                          PipelineStatus status) {
  DVLOG(1) << __FUNCTION__ << "(" << time_changed << ", " << status << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  seeking_ = false;
  if (pending_seek_) {
    pending_seek_ = false;
    seek(pending_seek_seconds_);
    return;
  }

  if (status != media::PIPELINE_OK) {
    OnPipelineError(status);
    return;
  }

  // Update our paused time.
  if (paused_)
    paused_time_ = pipeline_.GetMediaTime();

  should_notify_time_changed_ = time_changed;
}

void WebMediaPlayerImpl::OnPipelineEnded() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  client_->timeChanged();
}

void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(error, media::PIPELINE_OK);

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) {
    // Any error that occurs before reaching ReadyStateHaveMetadata should
    // be considered a format error.
    SetNetworkState(WebMediaPlayer::NetworkStateFormatError);
    return;
  }

  SetNetworkState(PipelineErrorToNetworkState(error));

  if (error == media::PIPELINE_ERROR_DECRYPT)
    encrypted_media_support_->OnPipelineDecryptError();
}

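// Called once the pipeline knows the stream metadata. Records the video
// rotation, advances the ready state and, for video, creates the compositor
// layer (swapping width and height for 90/270 degree rotations).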
void WebMediaPlayerImpl::OnPipelineMetadata(
    media::PipelineMetadata metadata) {
  DVLOG(1) << __FUNCTION__;

  pipeline_metadata_ = metadata;

  UMA_HISTOGRAM_ENUMERATION("Media.VideoRotation",
                            metadata.video_rotation,
                            media::VIDEO_ROTATION_MAX + 1);
  SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);

  if (hasVideo()) {
    DCHECK(!video_weblayer_);
    scoped_refptr<cc::VideoLayer> layer =
        cc::VideoLayer::Create(compositor_, pipeline_metadata_.video_rotation);

    if (pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_90 ||
        pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_270) {
      gfx::Size size = pipeline_metadata_.natural_size;
      pipeline_metadata_.natural_size = gfx::Size(size.height(), size.width());
    }

    video_weblayer_.reset(new cc_blink::WebLayerImpl(layer));
    video_weblayer_->setOpaque(opaque_);
    client_->setWebLayer(video_weblayer_.get());
  }
}

void WebMediaPlayerImpl::OnPipelineBufferingStateChanged(
    media::BufferingState buffering_state) {
  DVLOG(1) << __FUNCTION__ << "(" << buffering_state << ")";

  // Ignore buffering state changes until we've completed all outstanding seeks.
  if (seeking_ || pending_seek_)
    return;

  // TODO(scherkus): Handle other buffering states when Pipeline starts using
  // them and translate them into ready state changes. http://crbug.com/144683
  DCHECK_EQ(buffering_state, media::BUFFERING_HAVE_ENOUGH);
  SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);

  // Blink expects a timeChanged() in response to a seek().
  if (should_notify_time_changed_)
    client_->timeChanged();
}

void WebMediaPlayerImpl::OnDemuxerOpened() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  client_->mediaSourceOpened(new WebMediaSourceImpl(
      chunk_demuxer_, base::Bind(&LogMediaSourceError, media_log_)));
}

void WebMediaPlayerImpl::OnAddTextTrack(
    const media::TextTrackConfig& config,
    const media::AddTextTrackDoneCB& done_cb) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  const WebInbandTextTrackImpl::Kind web_kind =
      static_cast<WebInbandTextTrackImpl::Kind>(config.kind());
  const blink::WebString web_label =
      blink::WebString::fromUTF8(config.label());
  const blink::WebString web_language =
      blink::WebString::fromUTF8(config.language());
  const blink::WebString web_id =
      blink::WebString::fromUTF8(config.id());

  scoped_ptr<WebInbandTextTrackImpl> web_inband_text_track(
      new WebInbandTextTrackImpl(web_kind, web_label, web_language, web_id,
                                 text_track_index_++));

  scoped_ptr<media::TextTrack> text_track(new TextTrackImpl(
      main_task_runner_, client_, web_inband_text_track.Pass()));

  done_cb.Run(text_track.Pass());
}

void WebMediaPlayerImpl::DataSourceInitialized(bool success) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (!success) {
    SetNetworkState(WebMediaPlayer::NetworkStateFormatError);
    return;
  }

  StartPipeline();
}

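// Toggles the network state between Loading and Idle as the data source
// starts and stops downloading, and logs the change.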
void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) {
  if (!is_downloading && network_state_ == WebMediaPlayer::NetworkStateLoading)
    SetNetworkState(WebMediaPlayer::NetworkStateIdle);
  else if (is_downloading && network_state_ == WebMediaPlayer::NetworkStateIdle)
    SetNetworkState(WebMediaPlayer::NetworkStateLoading);
  media_log_->AddEvent(
      media_log_->CreateBooleanEvent(
          media::MediaLogEvent::NETWORK_ACTIVITY_SET,
          "is_downloading_data", is_downloading));
}

// TODO(xhwang): Move this to a factory class so that we can create different
// renderers.
scoped_ptr<media::Renderer> WebMediaPlayerImpl::CreateRenderer() {
  media::SetDecryptorReadyCB set_decryptor_ready_cb =
      encrypted_media_support_->CreateSetDecryptorReadyCB();

  // Create our audio decoders and renderer.
  ScopedVector<media::AudioDecoder> audio_decoders;

  media::LogCB log_cb = base::Bind(&LogMediaSourceError, media_log_);
  audio_decoders.push_back(new media::FFmpegAudioDecoder(media_task_runner_,
                                                         log_cb));
  audio_decoders.push_back(new media::OpusAudioDecoder(media_task_runner_));

  scoped_ptr<media::AudioRenderer> audio_renderer(new media::AudioRendererImpl(
      media_task_runner_,
      audio_source_provider_.get(),
      audio_decoders.Pass(),
      set_decryptor_ready_cb,
      audio_hardware_config_));

  // Create our video decoders and renderer.
  ScopedVector<media::VideoDecoder> video_decoders;

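  // Video decoders are added in preference order: GPU-accelerated decoding
  // when available, then libvpx (unless disabled), then FFmpeg as the
  // fallback.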
  if (gpu_factories_.get()) {
    video_decoders.push_back(
        new media::GpuVideoDecoder(gpu_factories_, media_log_));
  }

#if !defined(MEDIA_DISABLE_LIBVPX)
  video_decoders.push_back(new media::VpxVideoDecoder(media_task_runner_));
#endif  // !defined(MEDIA_DISABLE_LIBVPX)

  video_decoders.push_back(new media::FFmpegVideoDecoder(media_task_runner_));

  scoped_ptr<media::VideoRenderer> video_renderer(
      new media::VideoRendererImpl(
          media_task_runner_,
          video_decoders.Pass(),
          set_decryptor_ready_cb,
          base::Bind(&WebMediaPlayerImpl::FrameReady, base::Unretained(this)),
          true));

  // Create renderer.
  return scoped_ptr<media::Renderer>(new media::RendererImpl(
      media_task_runner_,
      demuxer_.get(),
      audio_renderer.Pass(),
      video_renderer.Pass()));
}

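// Picks a demuxer based on the load type (FFmpegDemuxer for regular URLs,
// ChunkDemuxer for Media Source playback) and starts the pipeline with
// callbacks trampolined back to the render thread.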
void WebMediaPlayerImpl::StartPipeline() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // Keep track of whether this is MSE or non-MSE playback.
  UMA_HISTOGRAM_BOOLEAN("Media.MSE.Playback",
                        (load_type_ == LoadTypeMediaSource));

  media::LogCB mse_log_cb;
  media::Demuxer::NeedKeyCB need_key_cb =
      encrypted_media_support_->CreateNeedKeyCB();

  // Figure out which demuxer to use.
  if (load_type_ != LoadTypeMediaSource) {
    DCHECK(!chunk_demuxer_);
    DCHECK(data_source_);

    demuxer_.reset(new media::FFmpegDemuxer(
        media_task_runner_, data_source_.get(),
        need_key_cb,
        media_log_));
  } else {
    DCHECK(!chunk_demuxer_);
    DCHECK(!data_source_);

    mse_log_cb = base::Bind(&LogMediaSourceError, media_log_);

    chunk_demuxer_ = new media::ChunkDemuxer(
        BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDemuxerOpened),
        need_key_cb,
        mse_log_cb,
        true);
    demuxer_.reset(chunk_demuxer_);
  }

  // ... and we're ready to go!
  seeking_ = true;
  pipeline_.Start(
      demuxer_.get(),
      CreateRenderer(),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineEnded),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineError),
      BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, false),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineMetadata),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineBufferingStateChanged),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDurationChanged),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnAddTextTrack));
}

void WebMediaPlayerImpl::SetNetworkState(WebMediaPlayer::NetworkState state) {
  DVLOG(1) << __FUNCTION__ << "(" << state << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  network_state_ = state;
  // Always notify to ensure client has the latest value.
  client_->networkStateChanged();
}

void WebMediaPlayerImpl::SetReadyState(WebMediaPlayer::ReadyState state) {
  DVLOG(1) << __FUNCTION__ << "(" << state << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (state == WebMediaPlayer::ReadyStateHaveEnoughData && data_source_ &&
      data_source_->assume_fully_buffered() &&
      network_state_ == WebMediaPlayer::NetworkStateLoading)
    SetNetworkState(WebMediaPlayer::NetworkStateLoaded);

  ready_state_ = state;
  // Always notify to ensure client has the latest value.
  client_->readyStateChanged();
}

blink::WebAudioSourceProvider* WebMediaPlayerImpl::audioSourceProvider() {
  return audio_source_provider_.get();
}

double WebMediaPlayerImpl::GetPipelineDuration() const {
  base::TimeDelta duration = pipeline_.GetMediaDuration();

  // Return positive infinity if the resource is unbounded.
  // http://www.whatwg.org/specs/web-apps/current-work/multipage/video.html#dom-media-duration
  if (duration == media::kInfiniteDuration())
    return std::numeric_limits<double>::infinity();

  return duration.InSecondsF();
}

void WebMediaPlayerImpl::OnDurationChanged() {
  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return;

  client_->durationChanged();
}

void WebMediaPlayerImpl::OnNaturalSizeChanged(gfx::Size size) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing);
  TRACE_EVENT0("media", "WebMediaPlayerImpl::OnNaturalSizeChanged");

  media_log_->AddEvent(
      media_log_->CreateVideoSizeSetEvent(size.width(), size.height()));
  pipeline_metadata_.natural_size = size;

  client_->sizeChanged();
}

void WebMediaPlayerImpl::OnOpacityChanged(bool opaque) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing);

  opaque_ = opaque;
  if (video_weblayer_)
    video_weblayer_->setOpaque(opaque_);
}

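// Receives decoded frames from the video renderer and hands them to the
// compositor on the compositor task runner.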
void WebMediaPlayerImpl::FrameReady(
    const scoped_refptr<media::VideoFrame>& frame) {
  compositor_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&VideoFrameCompositor::UpdateCurrentFrame,
                 base::Unretained(compositor_),
                 frame));
}

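// Runs on the compositor thread: grabs the compositor's current frame and
// signals the caller blocked in GetCurrentFrameFromCompositor().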
static void GetCurrentFrameAndSignal(
    VideoFrameCompositor* compositor,
    scoped_refptr<media::VideoFrame>* video_frame_out,
    base::WaitableEvent* event) {
  TRACE_EVENT0("media", "GetCurrentFrameAndSignal");
  *video_frame_out = compositor->GetCurrentFrame();
  event->Signal();
}

scoped_refptr<media::VideoFrame>
WebMediaPlayerImpl::GetCurrentFrameFromCompositor() {
  TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor");
  if (compositor_task_runner_->BelongsToCurrentThread())
    return compositor_->GetCurrentFrame();

  // Use a posted task and waitable event instead of a lock; otherwise
  // WebGL/Canvas can see different content than what the compositor is seeing.
  scoped_refptr<media::VideoFrame> video_frame;
  base::WaitableEvent event(false, false);
  compositor_task_runner_->PostTask(FROM_HERE,
                                    base::Bind(&GetCurrentFrameAndSignal,
                                               base::Unretained(compositor_),
                                               &video_frame,
                                               &event));
  event.Wait();
  return video_frame;
}

}  // namespace content