OLD | NEW |
---|---|
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webmediaplayer_ms.h" | 5 #include "content/renderer/media/webmediaplayer_ms.h" |
6 | 6 |
7 #include <limits> | 7 #include <limits> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/callback.h" | 10 #include "base/callback.h" |
11 #include "base/message_loop/message_loop.h" | 11 #include "base/message_loop/message_loop.h" |
12 #include "base/metrics/histogram.h" | 12 #include "base/metrics/histogram.h" |
13 #include "cc/blink/context_provider_web_context.h" | 13 #include "cc/blink/context_provider_web_context.h" |
14 #include "cc/blink/web_layer_impl.h" | 14 #include "cc/blink/web_layer_impl.h" |
15 #include "cc/layers/video_frame_provider_client_impl.h" | |
15 #include "cc/layers/video_layer.h" | 16 #include "cc/layers/video_layer.h" |
16 #include "content/public/renderer/media_stream_audio_renderer.h" | 17 #include "content/public/renderer/media_stream_audio_renderer.h" |
17 #include "content/public/renderer/media_stream_renderer_factory.h" | 18 #include "content/public/renderer/media_stream_renderer_factory.h" |
18 #include "content/public/renderer/render_view.h" | 19 #include "content/public/renderer/render_view.h" |
19 #include "content/public/renderer/video_frame_provider.h" | 20 #include "content/public/renderer/video_frame_provider.h" |
20 #include "content/renderer/render_frame_impl.h" | 21 #include "content/renderer/render_frame_impl.h" |
21 #include "content/renderer/render_thread_impl.h" | 22 #include "content/renderer/render_thread_impl.h" |
22 #include "gpu/blink/webgraphicscontext3d_impl.h" | 23 #include "gpu/blink/webgraphicscontext3d_impl.h" |
23 #include "media/base/media_log.h" | 24 #include "media/base/media_log.h" |
24 #include "media/base/video_frame.h" | 25 #include "media/base/video_frame.h" |
(...skipping 61 matching lines...) | |
86 return new_frame; | 87 return new_frame; |
87 } | 88 } |
88 | 89 |
89 } // anonymous namespace | 90 } // anonymous namespace |
90 | 91 |
91 WebMediaPlayerMS::WebMediaPlayerMS( | 92 WebMediaPlayerMS::WebMediaPlayerMS( |
92 blink::WebFrame* frame, | 93 blink::WebFrame* frame, |
93 blink::WebMediaPlayerClient* client, | 94 blink::WebMediaPlayerClient* client, |
94 base::WeakPtr<media::WebMediaPlayerDelegate> delegate, | 95 base::WeakPtr<media::WebMediaPlayerDelegate> delegate, |
95 media::MediaLog* media_log, | 96 media::MediaLog* media_log, |
96 scoped_ptr<MediaStreamRendererFactory> factory) | 97 scoped_ptr<MediaStreamRendererFactory> factory, |
98 scoped_refptr<base::SingleThreadTaskRunner> compositor_thread) | |
97 : frame_(frame), | 99 : frame_(frame), |
98 network_state_(WebMediaPlayer::NetworkStateEmpty), | 100 network_state_(WebMediaPlayer::NetworkStateEmpty), |
99 ready_state_(WebMediaPlayer::ReadyStateHaveNothing), | 101 ready_state_(WebMediaPlayer::ReadyStateHaveNothing), |
100 buffered_(static_cast<size_t>(0)), | 102 buffered_(static_cast<size_t>(0)), |
101 volume_(1.0f), | 103 volume_(1.0f), |
102 client_(client), | 104 client_(client), |
103 delegate_(delegate), | 105 delegate_(delegate), |
104 paused_(true), | 106 paused_(true), |
105 current_frame_used_(false), | |
106 video_frame_provider_client_(NULL), | |
107 received_first_frame_(false), | 107 received_first_frame_(false), |
108 total_frame_count_(0), | |
109 dropped_frame_count_(0), | |
110 media_log_(media_log), | 108 media_log_(media_log), |
111 renderer_factory_(factory.Pass()) { | 109 renderer_factory_(factory.Pass()), |
110 compositor_(new Compositor(compositor_thread)), | |
111 compositor_thread_(compositor_thread) { | |
112 DVLOG(1) << "WebMediaPlayerMS::ctor"; | 112 DVLOG(1) << "WebMediaPlayerMS::ctor"; |
113 media_log_->AddEvent( | 113 media_log_->AddEvent( |
114 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED)); | 114 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED)); |
115 } | 115 } |
116 | 116 |
117 WebMediaPlayerMS::~WebMediaPlayerMS() { | 117 WebMediaPlayerMS::~WebMediaPlayerMS() { |
118 DVLOG(1) << "WebMediaPlayerMS::dtor"; | 118 DVLOG(1) << "WebMediaPlayerMS::dtor"; |
119 DCHECK(thread_checker_.CalledOnValidThread()); | 119 DCHECK(thread_checker_.CalledOnValidThread()); |
120 | 120 |
121 SetVideoFrameProviderClient(NULL); | 121 compositor_thread_->DeleteSoon(FROM_HERE, compositor_); |
DaleCurtis 2015/08/14 17:07:56: I don't think this is quite right, you need to ens…
qiangchen 2015/08/14 18:16:39: Compositor class does not have a pointer to WMPMS,…
| |
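Context for the thread above: `compositor_` is now thread-affine to the compositor thread, so the main thread hands it off with `DeleteSoon` instead of deleting it directly, and must not touch it afterwards. Below is a minimal, self-contained sketch of that hand-off pattern; `OwningThread` is a hypothetical stand-in for `base::SingleThreadTaskRunner`, not Chromium code.

```cpp
// Sketch of the "delete on the owning thread" pattern behind DeleteSoon():
// the object is destroyed on its thread, ordered after every task already
// queued there.
#include <condition_variable>
#include <deque>
#include <functional>
#include <mutex>
#include <thread>

class OwningThread {
 public:
  OwningThread() : worker_(&OwningThread::Run, this) {}
  ~OwningThread() {
    Post({});  // An empty task is the quit signal.
    worker_.join();
  }

  void Post(std::function<void()> task) {
    std::lock_guard<std::mutex> lock(mutex_);
    tasks_.push_back(std::move(task));
    cv_.notify_one();
  }

  // Analogous to task_runner->DeleteSoon(FROM_HERE, ptr): deletion runs on
  // the worker thread, behind all previously posted tasks.
  template <typename T>
  void DeleteSoon(T* ptr) {
    Post([ptr] { delete ptr; });
  }

 private:
  void Run() {
    for (;;) {
      std::function<void()> task;
      {
        std::unique_lock<std::mutex> lock(mutex_);
        cv_.wait(lock, [this] { return !tasks_.empty(); });
        task = std::move(tasks_.front());
        tasks_.pop_front();
      }
      if (!task)
        return;  // Quit signal.
      task();
    }
  }

  std::mutex mutex_;
  std::condition_variable cv_;
  std::deque<std::function<void()>> tasks_;
  std::thread worker_;  // Declared last so it starts after the queue exists.
};
```

Because the deletion is just another queued task, it cannot race with work already posted to the compositor thread; the open question in the thread is only whether any of those earlier tasks can still reach back into the (by then destroyed) WebMediaPlayerMS.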
122 | |
122 GetClient()->setWebLayer(NULL); | 123 GetClient()->setWebLayer(NULL); |
123 | 124 |
124 if (video_frame_provider_.get()) | 125 if (video_frame_provider_.get()) |
125 video_frame_provider_->Stop(); | 126 video_frame_provider_->Stop(); |
126 | 127 |
127 if (audio_renderer_.get()) | 128 if (audio_renderer_.get()) |
128 audio_renderer_->Stop(); | 129 audio_renderer_->Stop(); |
129 | 130 |
130 media_log_->AddEvent( | 131 media_log_->AddEvent( |
131 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED)); | 132 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED)); |
(...skipping 48 matching lines...) | |
180 } | 181 } |
181 | 182 |
182 void WebMediaPlayerMS::play() { | 183 void WebMediaPlayerMS::play() { |
183 DVLOG(1) << "WebMediaPlayerMS::play"; | 184 DVLOG(1) << "WebMediaPlayerMS::play"; |
184 DCHECK(thread_checker_.CalledOnValidThread()); | 185 DCHECK(thread_checker_.CalledOnValidThread()); |
185 | 186 |
186 if (paused_) { | 187 if (paused_) { |
187 if (video_frame_provider_.get()) | 188 if (video_frame_provider_.get()) |
188 video_frame_provider_->Play(); | 189 video_frame_provider_->Play(); |
189 | 190 |
191 | |
192 compositor_thread_->PostTask( | |
193 FROM_HERE, base::Bind(&WebMediaPlayerMS::Compositor::StartRendering, | |
194 compositor_)); | |
195 | |
196 | |
190 if (audio_renderer_.get()) | 197 if (audio_renderer_.get()) |
191 audio_renderer_->Play(); | 198 audio_renderer_->Play(); |
192 | 199 |
193 if (delegate_.get()) | 200 if (delegate_.get()) |
194 delegate_->DidPlay(this); | 201 delegate_->DidPlay(this); |
195 } | 202 } |
196 | 203 |
197 paused_ = false; | 204 paused_ = false; |
198 | 205 |
199 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY)); | 206 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY)); |
200 } | 207 } |
201 | 208 |
202 void WebMediaPlayerMS::pause() { | 209 void WebMediaPlayerMS::pause() { |
203 DVLOG(1) << "WebMediaPlayerMS::pause"; | 210 DVLOG(1) << "WebMediaPlayerMS::pause"; |
204 DCHECK(thread_checker_.CalledOnValidThread()); | 211 DCHECK(thread_checker_.CalledOnValidThread()); |
205 | 212 |
206 if (video_frame_provider_.get()) | 213 if (video_frame_provider_.get()) |
207 video_frame_provider_->Pause(); | 214 video_frame_provider_->Pause(); |
208 | 215 |
216 compositor_thread_->PostTask( | |
217 FROM_HERE, base::Bind(&WebMediaPlayerMS::Compositor::StopRendering, | |
218 compositor_, &video_renderer_)); | |
219 | |
220 | |
209 if (!paused_) { | 221 if (!paused_) { |
210 if (audio_renderer_.get()) | 222 if (audio_renderer_.get()) |
211 audio_renderer_->Pause(); | 223 audio_renderer_->Pause(); |
212 | 224 |
213 if (delegate_.get()) | 225 if (delegate_.get()) |
214 delegate_->DidPause(this); | 226 delegate_->DidPause(this); |
215 } | 227 } |
216 | 228 |
217 paused_ = true; | 229 paused_ = true; |
218 | 230 |
219 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE)); | 231 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE)); |
220 | |
221 if (!current_frame_.get()) | |
222 return; | |
223 | |
224 // Copy the frame so that rendering can show the last received frame. | |
225 // The original frame must not be referenced when the player is paused since | |
226 // there might be a finite number of available buffers. E.g, video that | |
227 // originates from a video camera. | |
228 scoped_refptr<media::VideoFrame> new_frame = | |
229 CopyFrameToYV12(current_frame_, &video_renderer_); | |
230 | |
231 base::AutoLock auto_lock(current_frame_lock_); | |
232 current_frame_ = new_frame; | |
233 } | 232 } |
234 | 233 |
235 bool WebMediaPlayerMS::supportsSave() const { | 234 bool WebMediaPlayerMS::supportsSave() const { |
236 DCHECK(thread_checker_.CalledOnValidThread()); | 235 DCHECK(thread_checker_.CalledOnValidThread()); |
237 return false; | 236 return false; |
238 } | 237 } |
239 | 238 |
240 void WebMediaPlayerMS::seek(double seconds) { | 239 void WebMediaPlayerMS::seek(double seconds) { |
241 DCHECK(thread_checker_.CalledOnValidThread()); | 240 DCHECK(thread_checker_.CalledOnValidThread()); |
242 } | 241 } |
(...skipping 37 matching lines...) | |
280 } | 279 } |
281 | 280 |
282 bool WebMediaPlayerMS::hasAudio() const { | 281 bool WebMediaPlayerMS::hasAudio() const { |
283 DCHECK(thread_checker_.CalledOnValidThread()); | 282 DCHECK(thread_checker_.CalledOnValidThread()); |
284 return (audio_renderer_.get() != NULL); | 283 return (audio_renderer_.get() != NULL); |
285 } | 284 } |
286 | 285 |
287 blink::WebSize WebMediaPlayerMS::naturalSize() const { | 286 blink::WebSize WebMediaPlayerMS::naturalSize() const { |
288 DCHECK(thread_checker_.CalledOnValidThread()); | 287 DCHECK(thread_checker_.CalledOnValidThread()); |
289 | 288 |
290 gfx::Size size; | 289 gfx::Size size = compositor_->CurrentFrameSize(); |
291 if (current_frame_.get()) | 290 |
292 size = current_frame_->natural_size(); | |
293 DVLOG(3) << "WebMediaPlayerMS::naturalSize, " << size.ToString(); | 291 DVLOG(3) << "WebMediaPlayerMS::naturalSize, " << size.ToString(); |
294 return blink::WebSize(size); | 292 return blink::WebSize(size); |
295 } | 293 } |
296 | 294 |
297 bool WebMediaPlayerMS::paused() const { | 295 bool WebMediaPlayerMS::paused() const { |
298 DCHECK(thread_checker_.CalledOnValidThread()); | 296 DCHECK(thread_checker_.CalledOnValidThread()); |
299 return paused_; | 297 return paused_; |
300 } | 298 } |
301 | 299 |
302 bool WebMediaPlayerMS::seeking() const { | 300 bool WebMediaPlayerMS::seeking() const { |
303 DCHECK(thread_checker_.CalledOnValidThread()); | 301 DCHECK(thread_checker_.CalledOnValidThread()); |
304 return false; | 302 return false; |
305 } | 303 } |
306 | 304 |
307 double WebMediaPlayerMS::duration() const { | 305 double WebMediaPlayerMS::duration() const { |
308 DCHECK(thread_checker_.CalledOnValidThread()); | 306 DCHECK(thread_checker_.CalledOnValidThread()); |
309 return std::numeric_limits<double>::infinity(); | 307 return std::numeric_limits<double>::infinity(); |
310 } | 308 } |
311 | 309 |
312 double WebMediaPlayerMS::currentTime() const { | 310 double WebMediaPlayerMS::currentTime() const { |
313 DCHECK(thread_checker_.CalledOnValidThread()); | 311 DCHECK(thread_checker_.CalledOnValidThread()); |
314 if (current_time_.ToInternalValue() != 0) { | 312 base::TimeDelta current_time = compositor_->CurrentFrameTimestamp(); |
315 return current_time_.InSecondsF(); | 313 if (current_time.ToInternalValue() != 0) { |
314 return current_time.InSecondsF(); | |
316 } else if (audio_renderer_.get()) { | 315 } else if (audio_renderer_.get()) { |
317 return audio_renderer_->GetCurrentRenderTime().InSecondsF(); | 316 return audio_renderer_->GetCurrentRenderTime().InSecondsF(); |
318 } | 317 } |
319 return 0.0; | 318 return 0.0; |
320 } | 319 } |
321 | 320 |
322 WebMediaPlayer::NetworkState WebMediaPlayerMS::networkState() const { | 321 WebMediaPlayer::NetworkState WebMediaPlayerMS::networkState() const { |
323 DCHECK(thread_checker_.CalledOnValidThread()); | 322 DCHECK(thread_checker_.CalledOnValidThread()); |
324 DVLOG(1) << "WebMediaPlayerMS::networkState, state:" << network_state_; | 323 DVLOG(1) << "WebMediaPlayerMS::networkState, state:" << network_state_; |
325 return network_state_; | 324 return network_state_; |
(...skipping 20 matching lines...) | |
346 return true; | 345 return true; |
347 } | 346 } |
348 | 347 |
349 void WebMediaPlayerMS::paint(blink::WebCanvas* canvas, | 348 void WebMediaPlayerMS::paint(blink::WebCanvas* canvas, |
350 const blink::WebRect& rect, | 349 const blink::WebRect& rect, |
351 unsigned char alpha, | 350 unsigned char alpha, |
352 SkXfermode::Mode mode) { | 351 SkXfermode::Mode mode) { |
353 DVLOG(3) << "WebMediaPlayerMS::paint"; | 352 DVLOG(3) << "WebMediaPlayerMS::paint"; |
354 DCHECK(thread_checker_.CalledOnValidThread()); | 353 DCHECK(thread_checker_.CalledOnValidThread()); |
355 | 354 |
355 auto frame = compositor_->GetCurrentFrame(); | |
356 | |
356 media::Context3D context_3d; | 357 media::Context3D context_3d; |
357 if (current_frame_.get() && current_frame_->HasTextures()) { | 358 if (frame.get() && frame->HasTextures()) { |
358 cc::ContextProvider* provider = | 359 cc::ContextProvider* provider = |
359 RenderThreadImpl::current()->SharedMainThreadContextProvider().get(); | 360 RenderThreadImpl::current()->SharedMainThreadContextProvider().get(); |
360 // GPU Process crashed. | 361 // GPU Process crashed. |
361 if (!provider) | 362 if (!provider) |
362 return; | 363 return; |
363 context_3d = media::Context3D(provider->ContextGL(), provider->GrContext()); | 364 context_3d = media::Context3D(provider->ContextGL(), provider->GrContext()); |
364 DCHECK(context_3d.gl); | 365 DCHECK(context_3d.gl); |
365 } | 366 } |
366 gfx::RectF dest_rect(rect.x, rect.y, rect.width, rect.height); | 367 gfx::RectF dest_rect(rect.x, rect.y, rect.width, rect.height); |
367 video_renderer_.Paint(current_frame_, canvas, dest_rect, alpha, mode, | 368 video_renderer_.Paint(frame, canvas, dest_rect, alpha, mode, |
368 media::VIDEO_ROTATION_0, context_3d); | 369 media::VIDEO_ROTATION_0, context_3d); |
369 | |
370 { | |
371 base::AutoLock auto_lock(current_frame_lock_); | |
372 if (current_frame_.get()) | |
373 current_frame_used_ = true; | |
374 } | |
375 } | 370 } |
376 | 371 |
377 bool WebMediaPlayerMS::hasSingleSecurityOrigin() const { | 372 bool WebMediaPlayerMS::hasSingleSecurityOrigin() const { |
378 DCHECK(thread_checker_.CalledOnValidThread()); | 373 DCHECK(thread_checker_.CalledOnValidThread()); |
379 return true; | 374 return true; |
380 } | 375 } |
381 | 376 |
382 bool WebMediaPlayerMS::didPassCORSAccessCheck() const { | 377 bool WebMediaPlayerMS::didPassCORSAccessCheck() const { |
383 DCHECK(thread_checker_.CalledOnValidThread()); | 378 DCHECK(thread_checker_.CalledOnValidThread()); |
384 return true; | 379 return true; |
385 } | 380 } |
386 | 381 |
387 double WebMediaPlayerMS::mediaTimeForTimeValue(double timeValue) const { | 382 double WebMediaPlayerMS::mediaTimeForTimeValue(double timeValue) const { |
388 return media::ConvertSecondsToTimestamp(timeValue).InSecondsF(); | 383 return media::ConvertSecondsToTimestamp(timeValue).InSecondsF(); |
389 } | 384 } |
390 | 385 |
391 unsigned WebMediaPlayerMS::decodedFrameCount() const { | 386 unsigned WebMediaPlayerMS::decodedFrameCount() const { |
392 DCHECK(thread_checker_.CalledOnValidThread()); | 387 DCHECK(thread_checker_.CalledOnValidThread()); |
393 DVLOG(1) << "WebMediaPlayerMS::decodedFrameCount, " << total_frame_count_; | 388 unsigned total_frame_count = compositor_->TotalFrameCount(); |
394 return total_frame_count_; | 389 DVLOG(1) << "WebMediaPlayerMS::decodedFrameCount, " << total_frame_count; |
390 return total_frame_count; | |
395 } | 391 } |
396 | 392 |
397 unsigned WebMediaPlayerMS::droppedFrameCount() const { | 393 unsigned WebMediaPlayerMS::droppedFrameCount() const { |
398 DCHECK(thread_checker_.CalledOnValidThread()); | 394 DCHECK(thread_checker_.CalledOnValidThread()); |
399 DVLOG(1) << "WebMediaPlayerMS::droppedFrameCount, " << dropped_frame_count_; | 395 unsigned dropped_frame_count = compositor_->DroppedFrameCount(); |
400 return dropped_frame_count_; | 396 DVLOG(1) << "WebMediaPlayerMS::droppedFrameCount, " << dropped_frame_count; |
397 return dropped_frame_count; | |
401 } | 398 } |
402 | 399 |
403 unsigned WebMediaPlayerMS::audioDecodedByteCount() const { | 400 unsigned WebMediaPlayerMS::audioDecodedByteCount() const { |
404 DCHECK(thread_checker_.CalledOnValidThread()); | 401 DCHECK(thread_checker_.CalledOnValidThread()); |
405 NOTIMPLEMENTED(); | 402 NOTIMPLEMENTED(); |
406 return 0; | 403 return 0; |
407 } | 404 } |
408 | 405 |
409 unsigned WebMediaPlayerMS::videoDecodedByteCount() const { | 406 unsigned WebMediaPlayerMS::videoDecodedByteCount() const { |
410 DCHECK(thread_checker_.CalledOnValidThread()); | 407 DCHECK(thread_checker_.CalledOnValidThread()); |
411 NOTIMPLEMENTED(); | 408 NOTIMPLEMENTED(); |
412 return 0; | 409 return 0; |
413 } | 410 } |
414 | 411 |
415 bool WebMediaPlayerMS::copyVideoTextureToPlatformTexture( | 412 bool WebMediaPlayerMS::copyVideoTextureToPlatformTexture( |
416 blink::WebGraphicsContext3D* web_graphics_context, | 413 blink::WebGraphicsContext3D* web_graphics_context, |
417 unsigned int texture, | 414 unsigned int texture, |
418 unsigned int internal_format, | 415 unsigned int internal_format, |
419 unsigned int type, | 416 unsigned int type, |
420 bool premultiply_alpha, | 417 bool premultiply_alpha, |
421 bool flip_y) { | 418 bool flip_y) { |
422 TRACE_EVENT0("media", "WebMediaPlayerMS:copyVideoTextureToPlatformTexture"); | 419 TRACE_EVENT0("media", "WebMediaPlayerMS:copyVideoTextureToPlatformTexture"); |
423 DCHECK(thread_checker_.CalledOnValidThread()); | 420 DCHECK(thread_checker_.CalledOnValidThread()); |
424 | 421 |
425 scoped_refptr<media::VideoFrame> video_frame; | 422 scoped_refptr<media::VideoFrame> video_frame = compositor_->CurrentFrame(); |
426 { | |
427 base::AutoLock auto_lock(current_frame_lock_); | |
428 video_frame = current_frame_; | |
429 } | |
430 | 423 |
431 if (!video_frame.get() || video_frame->HasTextures() || | 424 if (!video_frame.get() || video_frame->HasTextures() || |
432 media::VideoFrame::NumPlanes(video_frame->format()) != 1) { | 425 media::VideoFrame::NumPlanes(video_frame->format()) != 1) { |
433 return false; | 426 return false; |
434 } | 427 } |
435 | 428 |
436 // TODO(dshwang): need more elegant way to convert WebGraphicsContext3D to | 429 // TODO(dshwang): need more elegant way to convert WebGraphicsContext3D to |
437 // GLES2Interface. | 430 // GLES2Interface. |
438 gpu::gles2::GLES2Interface* gl = | 431 gpu::gles2::GLES2Interface* gl = |
439 static_cast<gpu_blink::WebGraphicsContext3DImpl*>(web_graphics_context) | 432 static_cast<gpu_blink::WebGraphicsContext3DImpl*>(web_graphics_context) |
440 ->GetGLInterface(); | 433 ->GetGLInterface(); |
441 media::SkCanvasVideoRenderer::CopyVideoFrameSingleTextureToGLTexture( | 434 media::SkCanvasVideoRenderer::CopyVideoFrameSingleTextureToGLTexture( |
442 gl, video_frame.get(), texture, internal_format, type, premultiply_alpha, | 435 gl, video_frame.get(), texture, internal_format, type, premultiply_alpha, |
443 flip_y); | 436 flip_y); |
444 return true; | 437 return true; |
445 } | 438 } |
446 | 439 |
447 void WebMediaPlayerMS::SetVideoFrameProviderClient( | |
448 cc::VideoFrameProvider::Client* client) { | |
449 // This is called from both the main renderer thread and the compositor | |
450 // thread (when the main thread is blocked). | |
451 if (video_frame_provider_client_) | |
452 video_frame_provider_client_->StopUsingProvider(); | |
453 video_frame_provider_client_ = client; | |
454 } | |
455 | |
456 bool WebMediaPlayerMS::UpdateCurrentFrame(base::TimeTicks deadline_min, | |
457 base::TimeTicks deadline_max) { | |
458 // TODO(dalecurtis): This should make use of the deadline interval to ensure | |
459 // the painted frame is correct for the given interval. | |
460 NOTREACHED(); | |
461 return false; | |
462 } | |
463 | |
464 bool WebMediaPlayerMS::HasCurrentFrame() { | |
465 base::AutoLock auto_lock(current_frame_lock_); | |
466 return current_frame_; | |
467 } | |
468 | |
469 scoped_refptr<media::VideoFrame> WebMediaPlayerMS::GetCurrentFrame() { | |
470 DVLOG(3) << "WebMediaPlayerMS::GetCurrentFrame"; | |
471 base::AutoLock auto_lock(current_frame_lock_); | |
472 if (!current_frame_.get()) | |
473 return NULL; | |
474 current_frame_used_ = true; | |
475 return current_frame_; | |
476 } | |
477 | |
478 void WebMediaPlayerMS::PutCurrentFrame() { | |
479 DVLOG(3) << "WebMediaPlayerMS::PutCurrentFrame"; | |
480 } | |
481 | |
482 void WebMediaPlayerMS::OnFrameAvailable( | 440 void WebMediaPlayerMS::OnFrameAvailable( |
483 const scoped_refptr<media::VideoFrame>& frame) { | 441 const scoped_refptr<media::VideoFrame>& frame) { |
484 DVLOG(3) << "WebMediaPlayerMS::OnFrameAvailable"; | 442 DVLOG(3) << "WebMediaPlayerMS::OnFrameAvailable"; |
485 DCHECK(thread_checker_.CalledOnValidThread()); | 443 DCHECK(thread_checker_.CalledOnValidThread()); |
486 ++total_frame_count_; | 444 |
445 base::TimeTicks render_time; | |
446 if (!frame->metadata()->GetTimeTicks( | |
447 media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) { | |
448 render_time = base::TimeTicks(); | |
449 } | |
450 TRACE_EVENT1("webrtc", "WebMediaPlayerMS::OnFrameAvailable", | |
451 "Ideal Render Instant", render_time.ToInternalValue()); | |
452 | |
487 if (!received_first_frame_) { | 453 if (!received_first_frame_) { |
488 received_first_frame_ = true; | 454 received_first_frame_ = true; |
489 { | |
490 base::AutoLock auto_lock(current_frame_lock_); | |
491 DCHECK(!current_frame_used_); | |
492 current_frame_ = frame; | |
493 } | |
494 SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); | 455 SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); |
495 SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData); | 456 SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData); |
496 GetClient()->sizeChanged(); | |
497 | 457 |
498 if (video_frame_provider_.get()) { | 458 if (video_frame_provider_.get()) { |
499 video_weblayer_.reset(new cc_blink::WebLayerImpl( | 459 video_weblayer_.reset(new cc_blink::WebLayerImpl( |
500 cc::VideoLayer::Create(cc_blink::WebLayerImpl::LayerSettings(), this, | 460 cc::VideoLayer::Create(cc_blink::WebLayerImpl::LayerSettings(), |
461 compositor_, | |
501 media::VIDEO_ROTATION_0))); | 462 media::VIDEO_ROTATION_0))); |
502 video_weblayer_->setOpaque(true); | 463 video_weblayer_->setOpaque(true); |
503 GetClient()->setWebLayer(video_weblayer_.get()); | 464 GetClient()->setWebLayer(video_weblayer_.get()); |
504 } | 465 } |
505 } | 466 } |
506 | 467 |
507 // Do not update |current_frame_| when paused. | 468 bool size_changed = !compositor_->HasCurrentFrame() || |
508 if (paused_) | 469 compositor_->CurrentFrameSize() != frame->natural_size(); |
509 return; | |
510 | 470 |
511 bool size_changed = !current_frame_.get() || | 471 compositor_->EnqueueFrame(frame); |
512 current_frame_->natural_size() != frame->natural_size(); | |
513 | |
514 { | |
515 base::AutoLock auto_lock(current_frame_lock_); | |
516 if (!current_frame_used_ && current_frame_.get()) | |
517 ++dropped_frame_count_; | |
518 current_frame_ = frame; | |
519 current_time_ = frame->timestamp(); | |
520 current_frame_used_ = false; | |
521 } | |
522 | 472 |
523 if (size_changed) | 473 if (size_changed) |
524 GetClient()->sizeChanged(); | 474 GetClient()->sizeChanged(); |
525 | |
526 GetClient()->repaint(); | |
527 } | 475 } |
528 | 476 |
529 void WebMediaPlayerMS::RepaintInternal() { | 477 void WebMediaPlayerMS::RepaintInternal() { |
530 DVLOG(1) << "WebMediaPlayerMS::RepaintInternal"; | 478 DVLOG(1) << "WebMediaPlayerMS::RepaintInternal"; |
531 DCHECK(thread_checker_.CalledOnValidThread()); | 479 DCHECK(thread_checker_.CalledOnValidThread()); |
532 GetClient()->repaint(); | 480 GetClient()->repaint(); |
533 } | 481 } |
534 | 482 |
535 void WebMediaPlayerMS::OnSourceError() { | 483 void WebMediaPlayerMS::OnSourceError() { |
536 DVLOG(1) << "WebMediaPlayerMS::OnSourceError"; | 484 DVLOG(1) << "WebMediaPlayerMS::OnSourceError"; |
(...skipping 15 matching lines...) | |
552 // Always notify to ensure client has the latest value. | 500 // Always notify to ensure client has the latest value. |
553 GetClient()->readyStateChanged(); | 501 GetClient()->readyStateChanged(); |
554 } | 502 } |
555 | 503 |
556 blink::WebMediaPlayerClient* WebMediaPlayerMS::GetClient() { | 504 blink::WebMediaPlayerClient* WebMediaPlayerMS::GetClient() { |
557 DCHECK(thread_checker_.CalledOnValidThread()); | 505 DCHECK(thread_checker_.CalledOnValidThread()); |
558 DCHECK(client_); | 506 DCHECK(client_); |
559 return client_; | 507 return client_; |
560 } | 508 } |
561 | 509 |
510 WebMediaPlayerMS::Compositor::Compositor( | |
511 scoped_refptr<base::SingleThreadTaskRunner> compositor_thread) | |
512 : compositor_thread_(compositor_thread), | |
513 video_frame_provider_client_(NULL), | |
514 current_frame_used_(false), | |
515 total_frame_count_(0), | |
516 dropped_frame_count_(0), | |
517 paused_(false) {} | |
518 | |
519 WebMediaPlayerMS::Compositor::~Compositor() { | |
520 DCHECK(compositor_thread_->BelongsToCurrentThread()); | |
521 if (video_frame_provider_client_) | |
522 video_frame_provider_client_->StopUsingProvider(); | |
523 } | |
524 | |
525 void WebMediaPlayerMS::Compositor::EnqueueFrame( | |
526 scoped_refptr<media::VideoFrame> frame) { | |
527 base::AutoLock auto_lock(current_frame_lock_); | |
528 ++total_frame_count_; | |
529 if (!current_frame_used_) ++dropped_frame_count_; | |
530 | |
531 // When paused, do not update current_frame; | |
532 // TODO(qiangchen): After applying VRA, this check should move to | |
533 // UpdateCurrentFrame. During pause, we have to let frames go through VRA and | |
534 // then be thrown away, otherwise VRA will think there is a very long frame. | |
535 if (paused_) return; | |
536 | |
537 if (base::TimeTicks::Now() > last_deadline_max_) { | |
538 // TODO(qiangchen): This shows vsyncs stops rendering frames. A probable | |
539 // cause is that the tab is not in the front. But we still have to let | |
540 // old frames go. Call VRA::RemoveExpiredFrames. | |
541 } | |
542 | |
543 // TODO(qiangchen): Instead of using one variable to hold one frame, use | |
544 // VideoRendererAlgorithm. | |
545 current_frame_ = frame; | |
546 current_frame_used_ = false; | |
547 current_time_ = frame->timestamp(); | |
548 } | |
549 | |
550 bool WebMediaPlayerMS::Compositor::UpdateCurrentFrame( | |
551 base::TimeTicks deadline_min, | |
552 base::TimeTicks deadline_max) { | |
553 DCHECK(compositor_thread_->BelongsToCurrentThread()); | |
554 base::AutoLock auto_lock(current_frame_lock_); | |
555 TRACE_EVENT_BEGIN2("webrtc", "WebMediaPlayerMS::UpdateCurrentFrame", | |
556 "Actual Render Begin", deadline_min.ToInternalValue(), | |
557 "Actual Render End", deadline_max.ToInternalValue()); | |
558 last_deadline_max_ = deadline_max; | |
559 | |
560 // TODO(dalecurtis): This should make use of the deadline interval to ensure | |
561 // the painted frame is correct for the given interval. | |
562 | |
563 base::TimeTicks render_time; | |
564 if (!current_frame_->metadata()->GetTimeTicks( | |
565 media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) { | |
566 render_time = base::TimeTicks(); | |
567 } | |
568 TRACE_EVENT_END1("webrtc", "WebMediaPlayerMS::UpdateCurrentFrame", | |
569 "Ideal Render Instant", render_time.ToInternalValue()); | |
570 return !current_frame_used_; | |
571 } | |
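On the VRA TODOs above: the direction they describe is to hand every incoming frame to `media::VideoRendererAlgorithm` and let it pick the frame for each compositor interval, which also yields the dropped-frame count. A hedged sketch of what that could look like; the `rendering_frame_buffer_` member and the exact VRA signatures are assumptions here, not part of this patch, and construction of the algorithm (it needs a wall-clock callback) is omitted.

```cpp
// Hypothetical VRA-based version of the two methods above, assuming a
// member scoped_ptr<media::VideoRendererAlgorithm> rendering_frame_buffer_.
void WebMediaPlayerMS::Compositor::EnqueueFrame(
    scoped_refptr<media::VideoFrame> frame) {
  base::AutoLock auto_lock(current_frame_lock_);
  ++total_frame_count_;
  rendering_frame_buffer_->EnqueueFrame(frame);
}

bool WebMediaPlayerMS::Compositor::UpdateCurrentFrame(
    base::TimeTicks deadline_min,
    base::TimeTicks deadline_max) {
  DCHECK(compositor_thread_->BelongsToCurrentThread());
  base::AutoLock auto_lock(current_frame_lock_);
  size_t frames_dropped = 0;
  scoped_refptr<media::VideoFrame> frame = rendering_frame_buffer_->Render(
      deadline_min, deadline_max, &frames_dropped);
  dropped_frame_count_ += frames_dropped;
  if (!frame.get() || frame.get() == current_frame_.get())
    return false;  // Nothing new to composite for this interval.
  current_frame_ = frame;
  current_time_ = frame->timestamp();
  return true;
}
```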
572 | |
573 bool WebMediaPlayerMS::Compositor::HasCurrentFrame() { | |
574 base::AutoLock auto_lock(current_frame_lock_); | |
575 return current_frame_; | |
576 } | |
577 | |
578 scoped_refptr<media::VideoFrame> | |
579 WebMediaPlayerMS::Compositor::GetCurrentFrame() { | |
580 DVLOG(3) << "WebMediaPlayerMS::Compositor::GetCurrentFrame"; | |
581 base::AutoLock auto_lock(current_frame_lock_); | |
582 if (!current_frame_.get()) | |
583 return NULL; | |
584 current_frame_used_ = true; | |
DaleCurtis 2015/08/14 18:31:24: Technically you don't know this until PutCurrentFr…
qiangchen 2015/08/14 20:09:42: Did you mean to modify current_frame_used_ in PutC…
DaleCurtis 2015/08/14 20:47:28: Actually I misremembered the issue. It's more that…
qiangchen 2015/08/14 21:53:48: For WMPMS, it is just used to keep a statistics on…
| |
585 return current_frame_; | |
586 } | |
587 | |
588 void WebMediaPlayerMS::Compositor::PutCurrentFrame() { | |
589 DVLOG(3) << "WebMediaPlayerMS::PutCurrentFrame"; | |
590 } | |
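Regarding the thread on `current_frame_used_` above: the flag only feeds the dropped-frame statistic, and the alternative being discussed is to mark the frame as rendered when cc actually takes it, i.e. in `PutCurrentFrame()` rather than `GetCurrentFrame()`. A sketch of that hypothetical variant, reusing the patch's members:

```cpp
// Hypothetical variant discussed in the review, not the patch as written.
scoped_refptr<media::VideoFrame>
WebMediaPlayerMS::Compositor::GetCurrentFrame() {
  base::AutoLock auto_lock(current_frame_lock_);
  return current_frame_;  // May be NULL before the first frame arrives.
}

void WebMediaPlayerMS::Compositor::PutCurrentFrame() {
  base::AutoLock auto_lock(current_frame_lock_);
  current_frame_used_ = true;  // Counted as rendered for the drop statistic.
}
```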
591 | |
DaleCurtis 2015/08/14 18:31:24: Extra space.
qiangchen 2015/08/14 20:09:42: Done.
| |
592 | |
593 void WebMediaPlayerMS::Compositor::SetVideoFrameProviderClient( | |
594 cc::VideoFrameProvider::Client* client) { | |
595 DCHECK(compositor_thread_->BelongsToCurrentThread()); | |
596 if (video_frame_provider_client_) | |
597 video_frame_provider_client_->StopUsingProvider(); | |
598 | |
599 video_frame_provider_client_ = client; | |
600 if (video_frame_provider_client_) | |
601 video_frame_provider_client_->StartRendering(); | |
602 } | |
603 | |
604 void WebMediaPlayerMS::Compositor::StartRendering(Compositor* compositor) { | |
605 DCHECK(compositor->compositor_thread_->BelongsToCurrentThread()); | |
606 compositor->paused_ = false; | |
607 if (compositor->video_frame_provider_client_) | |
608 compositor->video_frame_provider_client_->StartRendering(); | |
609 } | |
610 | |
611 void WebMediaPlayerMS::Compositor::StopRendering( | |
612 Compositor* compositor, | |
613 media::SkCanvasVideoRenderer* video_renderer) { | |
614 DCHECK(compositor->compositor_thread_->BelongsToCurrentThread()); | |
615 compositor->paused_ = true; | |
616 if (compositor->video_frame_provider_client_) | |
617 compositor->video_frame_provider_client_->StopRendering(); | |
618 | |
619 base::AutoLock auto_lock(compositor->current_frame_lock_); | |
620 if (!compositor->current_frame_.get()) | |
621 return; | |
622 | |
623 // Copy the frame so that rendering can show the last received frame. | |
624 // The original frame must not be referenced when the player is paused since | |
625 // there might be a finite number of available buffers. E.g, video that | |
626 // originates from a video camera. | |
627 scoped_refptr<media::VideoFrame> new_frame = | |
DaleCurtis 2015/08/14 18:31:24: Hmm, how is this going to work with GpuMemoryBuffe…
qiangchen 2015/08/14 20:09:42: Hmm, that's kind of out of my scope right now. As…
| |
628 CopyFrameToYV12(compositor->current_frame_, video_renderer); | |
629 | |
630 compositor->current_frame_ = new_frame; | |
631 } | |
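On the GpuMemoryBuffer question above: if `CopyFrameToYV12()` needs CPU-mapped pixel data (the concern raised in that thread), a texture-backed frame cannot be snapshotted this way without a readback. One defensive option, purely hypothetical and not part of this patch, is to skip the pause-time copy for such frames:

```cpp
// Hypothetical guard: only snapshot frames whose planes are CPU-accessible;
// texture-backed (e.g. GpuMemoryBuffer) frames would need a readback path.
if (!compositor->current_frame_->HasTextures()) {
  compositor->current_frame_ =
      CopyFrameToYV12(compositor->current_frame_, video_renderer);
}
```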
632 | |
633 gfx::Size WebMediaPlayerMS::Compositor::CurrentFrameSize() { | |
634 base::AutoLock auto_lock(current_frame_lock_); | |
635 if (current_frame_.get()) | |
636 return current_frame_->natural_size(); | |
637 else | |
DaleCurtis 2015/08/14 18:31:24: Don't use else w/ return.
qiangchen 2015/08/14 20:09:42: Done.
| |
638 return gfx::Size(); | |
639 } | |
640 | |
641 base::TimeDelta WebMediaPlayerMS::Compositor::CurrentFrameTimestamp() { | |
642 return current_time_; | |
643 } | |
644 | |
645 unsigned WebMediaPlayerMS::Compositor::TotalFrameCount() { | |
646 return total_frame_count_; | |
647 } | |
648 | |
649 unsigned WebMediaPlayerMS::Compositor::DroppedFrameCount() { | |
650 return dropped_frame_count_; | |
651 } | |
652 | |
653 scoped_refptr<media::VideoFrame> WebMediaPlayerMS::Compositor::CurrentFrame() { | |
654 base::AutoLock auto_lock(current_frame_lock_); | |
655 if (!current_frame_.get()) | |
DaleCurtis 2015/08/14 18:31:24: This is just return current_frame_ ?
qiangchen 2015/08/14 20:09:42: If we set current_frame_used_ in PutCurrentFrame,…
| |
656 return NULL; | |
657 return current_frame_; | |
658 } | |
659 | |
562 } // namespace content | 660 } // namespace content |