| OLD | NEW |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/webmediaplayer_ms.h" | 5 #include "content/renderer/media/webmediaplayer_ms.h" |
| 6 | 6 |
| 7 #include <limits> | 7 #include <limits> |
| 8 | 8 |
| 9 #include "base/bind.h" | 9 #include "base/bind.h" |
| 10 #include "base/callback.h" | 10 #include "base/callback.h" |
| 11 #include "base/message_loop/message_loop.h" | 11 #include "base/message_loop/message_loop.h" |
| 12 #include "base/metrics/histogram.h" | 12 #include "base/metrics/histogram.h" |
| 13 #include "cc/blink/context_provider_web_context.h" | 13 #include "cc/blink/context_provider_web_context.h" |
| 14 #include "cc/blink/web_layer_impl.h" | 14 #include "cc/blink/web_layer_impl.h" |
| 15 #include "cc/layers/video_frame_provider_client_impl.h" | |
| 16 #include "cc/layers/video_layer.h" | 15 #include "cc/layers/video_layer.h" |
| 17 #include "content/public/renderer/media_stream_audio_renderer.h" | 16 #include "content/public/renderer/media_stream_audio_renderer.h" |
| 18 #include "content/public/renderer/media_stream_renderer_factory.h" | 17 #include "content/public/renderer/media_stream_renderer_factory.h" |
| 19 #include "content/public/renderer/render_view.h" | 18 #include "content/public/renderer/render_view.h" |
| 20 #include "content/public/renderer/video_frame_provider.h" | 19 #include "content/public/renderer/video_frame_provider.h" |
| 21 #include "content/renderer/render_frame_impl.h" | 20 #include "content/renderer/render_frame_impl.h" |
| 22 #include "content/renderer/render_thread_impl.h" | 21 #include "content/renderer/render_thread_impl.h" |
| 23 #include "gpu/blink/webgraphicscontext3d_impl.h" | 22 #include "gpu/blink/webgraphicscontext3d_impl.h" |
| 24 #include "media/base/media_log.h" | 23 #include "media/base/media_log.h" |
| 25 #include "media/base/video_frame.h" | 24 #include "media/base/video_frame.h" |
| (...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 86 return new_frame; | 85 return new_frame; |
| 87 } | 86 } |
| 88 | 87 |
| 89 } // anonymous namespace | 88 } // anonymous namespace |
| 90 | 89 |
| 91 WebMediaPlayerMS::WebMediaPlayerMS( | 90 WebMediaPlayerMS::WebMediaPlayerMS( |
| 92 blink::WebFrame* frame, | 91 blink::WebFrame* frame, |
| 93 blink::WebMediaPlayerClient* client, | 92 blink::WebMediaPlayerClient* client, |
| 94 base::WeakPtr<media::WebMediaPlayerDelegate> delegate, | 93 base::WeakPtr<media::WebMediaPlayerDelegate> delegate, |
| 95 media::MediaLog* media_log, | 94 media::MediaLog* media_log, |
| 96 scoped_ptr<MediaStreamRendererFactory> factory, | 95 scoped_ptr<MediaStreamRendererFactory> factory) |
| 97 const scoped_refptr<base::SingleThreadTaskRunner>& compositor_task_runner) | |
| 98 : frame_(frame), | 96 : frame_(frame), |
| 99 network_state_(WebMediaPlayer::NetworkStateEmpty), | 97 network_state_(WebMediaPlayer::NetworkStateEmpty), |
| 100 ready_state_(WebMediaPlayer::ReadyStateHaveNothing), | 98 ready_state_(WebMediaPlayer::ReadyStateHaveNothing), |
| 101 buffered_(static_cast<size_t>(0)), | 99 buffered_(static_cast<size_t>(0)), |
| 102 volume_(1.0f), | 100 volume_(1.0f), |
| 103 client_(client), | 101 client_(client), |
| 104 delegate_(delegate), | 102 delegate_(delegate), |
| 105 paused_(true), | 103 paused_(true), |
| 104 current_frame_used_(false), |
| 105 video_frame_provider_client_(NULL), |
| 106 received_first_frame_(false), | 106 received_first_frame_(false), |
| 107 total_frame_count_(0), |
| 108 dropped_frame_count_(0), |
| 107 media_log_(media_log), | 109 media_log_(media_log), |
| 108 renderer_factory_(factory.Pass()), | 110 renderer_factory_(factory.Pass()) { |
| 109 compositor_(new Compositor(compositor_task_runner)), | |
| 110 compositor_task_runner_(compositor_task_runner) { | |
| 111 DVLOG(1) << "WebMediaPlayerMS::ctor"; | 111 DVLOG(1) << "WebMediaPlayerMS::ctor"; |
| 112 media_log_->AddEvent( | 112 media_log_->AddEvent( |
| 113 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED)); | 113 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED)); |
| 114 } | 114 } |
| 115 | 115 |
| 116 WebMediaPlayerMS::~WebMediaPlayerMS() { | 116 WebMediaPlayerMS::~WebMediaPlayerMS() { |
| 117 DVLOG(1) << "WebMediaPlayerMS::dtor"; | 117 DVLOG(1) << "WebMediaPlayerMS::dtor"; |
| 118 DCHECK(thread_checker_.CalledOnValidThread()); | 118 DCHECK(thread_checker_.CalledOnValidThread()); |
| 119 | 119 |
| 120 compositor_task_runner_->DeleteSoon(FROM_HERE, compositor_.release()); | 120 SetVideoFrameProviderClient(NULL); |
| 121 | |
| 122 GetClient()->setWebLayer(NULL); | 121 GetClient()->setWebLayer(NULL); |
| 123 | 122 |
| 124 if (video_frame_provider_.get()) | 123 if (video_frame_provider_.get()) |
| 125 video_frame_provider_->Stop(); | 124 video_frame_provider_->Stop(); |
| 126 | 125 |
| 127 if (audio_renderer_.get()) | 126 if (audio_renderer_.get()) |
| 128 audio_renderer_->Stop(); | 127 audio_renderer_->Stop(); |
| 129 | 128 |
| 130 media_log_->AddEvent( | 129 media_log_->AddEvent( |
| 131 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED)); | 130 media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED)); |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 180 } | 179 } |
| 181 | 180 |
| 182 void WebMediaPlayerMS::play() { | 181 void WebMediaPlayerMS::play() { |
| 183 DVLOG(1) << "WebMediaPlayerMS::play"; | 182 DVLOG(1) << "WebMediaPlayerMS::play"; |
| 184 DCHECK(thread_checker_.CalledOnValidThread()); | 183 DCHECK(thread_checker_.CalledOnValidThread()); |
| 185 | 184 |
| 186 if (paused_) { | 185 if (paused_) { |
| 187 if (video_frame_provider_.get()) | 186 if (video_frame_provider_.get()) |
| 188 video_frame_provider_->Play(); | 187 video_frame_provider_->Play(); |
| 189 | 188 |
| 190 compositor_task_runner_->PostTask( | |
| 191 FROM_HERE, base::Bind(&WebMediaPlayerMS::Compositor::StartRendering, | |
| 192 base::Unretained(compositor_.get()))); | |
| 193 | |
| 194 if (audio_renderer_.get()) | 189 if (audio_renderer_.get()) |
| 195 audio_renderer_->Play(); | 190 audio_renderer_->Play(); |
| 196 | 191 |
| 197 if (delegate_.get()) | 192 if (delegate_.get()) |
| 198 delegate_->DidPlay(this); | 193 delegate_->DidPlay(this); |
| 199 } | 194 } |
| 200 | 195 |
| 201 paused_ = false; | 196 paused_ = false; |
| 202 | 197 |
| 203 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY)); | 198 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY)); |
| 204 } | 199 } |
| 205 | 200 |
| 206 void WebMediaPlayerMS::pause() { | 201 void WebMediaPlayerMS::pause() { |
| 207 DVLOG(1) << "WebMediaPlayerMS::pause"; | 202 DVLOG(1) << "WebMediaPlayerMS::pause"; |
| 208 DCHECK(thread_checker_.CalledOnValidThread()); | 203 DCHECK(thread_checker_.CalledOnValidThread()); |
| 209 | 204 |
| 210 if (video_frame_provider_.get()) | 205 if (video_frame_provider_.get()) |
| 211 video_frame_provider_->Pause(); | 206 video_frame_provider_->Pause(); |
| 212 | 207 |
| 213 compositor_task_runner_->PostTask( | |
| 214 FROM_HERE, base::Bind(&WebMediaPlayerMS::Compositor::StopRendering, | |
| 215 base::Unretained(compositor_.get()))); | |
| 216 | |
| 217 if (!paused_) { | 208 if (!paused_) { |
| 218 if (audio_renderer_.get()) | 209 if (audio_renderer_.get()) |
| 219 audio_renderer_->Pause(); | 210 audio_renderer_->Pause(); |
| 220 | 211 |
| 221 if (delegate_.get()) | 212 if (delegate_.get()) |
| 222 delegate_->DidPause(this); | 213 delegate_->DidPause(this); |
| 223 } | 214 } |
| 224 | 215 |
| 225 paused_ = true; | 216 paused_ = true; |
| 226 | 217 |
| 227 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE)); | 218 media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE)); |
| 219 |
| 220 if (!current_frame_.get()) |
| 221 return; |
| 222 |
| 223 // Copy the frame so that rendering can show the last received frame. |
| 224 // The original frame must not be referenced when the player is paused since |
| 225 // there might be a finite number of available buffers. E.g., video that |
| 226 // originates from a video camera. |
| 227 scoped_refptr<media::VideoFrame> new_frame = |
| 228 CopyFrameToYV12(current_frame_, &video_renderer_); |
| 229 |
| 230 base::AutoLock auto_lock(current_frame_lock_); |
| 231 current_frame_ = new_frame; |
| 228 } | 232 } |
| 229 | 233 |
| 230 bool WebMediaPlayerMS::supportsSave() const { | 234 bool WebMediaPlayerMS::supportsSave() const { |
| 231 DCHECK(thread_checker_.CalledOnValidThread()); | 235 DCHECK(thread_checker_.CalledOnValidThread()); |
| 232 return false; | 236 return false; |
| 233 } | 237 } |
| 234 | 238 |
| 235 void WebMediaPlayerMS::seek(double seconds) { | 239 void WebMediaPlayerMS::seek(double seconds) { |
| 236 DCHECK(thread_checker_.CalledOnValidThread()); | 240 DCHECK(thread_checker_.CalledOnValidThread()); |
| 237 } | 241 } |
| (...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 277 } | 281 } |
| 278 | 282 |
| 279 bool WebMediaPlayerMS::hasAudio() const { | 283 bool WebMediaPlayerMS::hasAudio() const { |
| 280 DCHECK(thread_checker_.CalledOnValidThread()); | 284 DCHECK(thread_checker_.CalledOnValidThread()); |
| 281 return (audio_renderer_.get() != NULL); | 285 return (audio_renderer_.get() != NULL); |
| 282 } | 286 } |
| 283 | 287 |
| 284 blink::WebSize WebMediaPlayerMS::naturalSize() const { | 288 blink::WebSize WebMediaPlayerMS::naturalSize() const { |
| 285 DCHECK(thread_checker_.CalledOnValidThread()); | 289 DCHECK(thread_checker_.CalledOnValidThread()); |
| 286 | 290 |
| 287 gfx::Size size = compositor_->GetCurrentSize(); | 291 gfx::Size size; |
| 288 | 292 if (current_frame_.get()) |
| 293 size = current_frame_->natural_size(); |
| 289 DVLOG(3) << "WebMediaPlayerMS::naturalSize, " << size.ToString(); | 294 DVLOG(3) << "WebMediaPlayerMS::naturalSize, " << size.ToString(); |
| 290 return blink::WebSize(size); | 295 return blink::WebSize(size); |
| 291 } | 296 } |
| 292 | 297 |
| 293 bool WebMediaPlayerMS::paused() const { | 298 bool WebMediaPlayerMS::paused() const { |
| 294 DCHECK(thread_checker_.CalledOnValidThread()); | 299 DCHECK(thread_checker_.CalledOnValidThread()); |
| 295 return paused_; | 300 return paused_; |
| 296 } | 301 } |
| 297 | 302 |
| 298 bool WebMediaPlayerMS::seeking() const { | 303 bool WebMediaPlayerMS::seeking() const { |
| 299 DCHECK(thread_checker_.CalledOnValidThread()); | 304 DCHECK(thread_checker_.CalledOnValidThread()); |
| 300 return false; | 305 return false; |
| 301 } | 306 } |
| 302 | 307 |
| 303 double WebMediaPlayerMS::duration() const { | 308 double WebMediaPlayerMS::duration() const { |
| 304 DCHECK(thread_checker_.CalledOnValidThread()); | 309 DCHECK(thread_checker_.CalledOnValidThread()); |
| 305 return std::numeric_limits<double>::infinity(); | 310 return std::numeric_limits<double>::infinity(); |
| 306 } | 311 } |
| 307 | 312 |
| 308 double WebMediaPlayerMS::currentTime() const { | 313 double WebMediaPlayerMS::currentTime() const { |
| 309 DCHECK(thread_checker_.CalledOnValidThread()); | 314 DCHECK(thread_checker_.CalledOnValidThread()); |
| 310 base::TimeDelta current_time = compositor_->GetCurrentTime(); | 315 if (current_time_.ToInternalValue() != 0) { |
| 311 if (current_time.ToInternalValue() != 0) { | 316 return current_time_.InSecondsF(); |
| 312 return current_time.InSecondsF(); | |
| 313 } else if (audio_renderer_.get()) { | 317 } else if (audio_renderer_.get()) { |
| 314 return audio_renderer_->GetCurrentRenderTime().InSecondsF(); | 318 return audio_renderer_->GetCurrentRenderTime().InSecondsF(); |
| 315 } | 319 } |
| 316 return 0.0; | 320 return 0.0; |
| 317 } | 321 } |
| 318 | 322 |
| 319 WebMediaPlayer::NetworkState WebMediaPlayerMS::networkState() const { | 323 WebMediaPlayer::NetworkState WebMediaPlayerMS::networkState() const { |
| 320 DCHECK(thread_checker_.CalledOnValidThread()); | 324 DCHECK(thread_checker_.CalledOnValidThread()); |
| 321 DVLOG(1) << "WebMediaPlayerMS::networkState, state:" << network_state_; | 325 DVLOG(1) << "WebMediaPlayerMS::networkState, state:" << network_state_; |
| 322 return network_state_; | 326 return network_state_; |
| (...skipping 20 matching lines...) Expand all Loading... |
| 343 return true; | 347 return true; |
| 344 } | 348 } |
| 345 | 349 |
| 346 void WebMediaPlayerMS::paint(blink::WebCanvas* canvas, | 350 void WebMediaPlayerMS::paint(blink::WebCanvas* canvas, |
| 347 const blink::WebRect& rect, | 351 const blink::WebRect& rect, |
| 348 unsigned char alpha, | 352 unsigned char alpha, |
| 349 SkXfermode::Mode mode) { | 353 SkXfermode::Mode mode) { |
| 350 DVLOG(3) << "WebMediaPlayerMS::paint"; | 354 DVLOG(3) << "WebMediaPlayerMS::paint"; |
| 351 DCHECK(thread_checker_.CalledOnValidThread()); | 355 DCHECK(thread_checker_.CalledOnValidThread()); |
| 352 | 356 |
| 353 scoped_refptr<media::VideoFrame> frame = compositor_->GetCurrentFrame(); | |
| 354 | |
| 355 media::Context3D context_3d; | 357 media::Context3D context_3d; |
| 356 if (frame.get() && frame->HasTextures()) { | 358 if (current_frame_.get() && current_frame_->HasTextures()) { |
| 357 cc::ContextProvider* provider = | 359 cc::ContextProvider* provider = |
| 358 RenderThreadImpl::current()->SharedMainThreadContextProvider().get(); | 360 RenderThreadImpl::current()->SharedMainThreadContextProvider().get(); |
| 359 // GPU Process crashed. | 361 // GPU Process crashed. |
| 360 if (!provider) | 362 if (!provider) |
| 361 return; | 363 return; |
| 362 context_3d = media::Context3D(provider->ContextGL(), provider->GrContext()); | 364 context_3d = media::Context3D(provider->ContextGL(), provider->GrContext()); |
| 363 DCHECK(context_3d.gl); | 365 DCHECK(context_3d.gl); |
| 364 } | 366 } |
| 365 gfx::RectF dest_rect(rect.x, rect.y, rect.width, rect.height); | 367 gfx::RectF dest_rect(rect.x, rect.y, rect.width, rect.height); |
| 366 video_renderer_.Paint(frame, canvas, dest_rect, alpha, mode, | 368 video_renderer_.Paint(current_frame_, canvas, dest_rect, alpha, mode, |
| 367 media::VIDEO_ROTATION_0, context_3d); | 369 media::VIDEO_ROTATION_0, context_3d); |
| 370 |
| 371 { |
| 372 base::AutoLock auto_lock(current_frame_lock_); |
| 373 if (current_frame_.get()) |
| 374 current_frame_used_ = true; |
| 375 } |
| 368 } | 376 } |
| 369 | 377 |
| 370 bool WebMediaPlayerMS::hasSingleSecurityOrigin() const { | 378 bool WebMediaPlayerMS::hasSingleSecurityOrigin() const { |
| 371 DCHECK(thread_checker_.CalledOnValidThread()); | 379 DCHECK(thread_checker_.CalledOnValidThread()); |
| 372 return true; | 380 return true; |
| 373 } | 381 } |
| 374 | 382 |
| 375 bool WebMediaPlayerMS::didPassCORSAccessCheck() const { | 383 bool WebMediaPlayerMS::didPassCORSAccessCheck() const { |
| 376 DCHECK(thread_checker_.CalledOnValidThread()); | 384 DCHECK(thread_checker_.CalledOnValidThread()); |
| 377 return true; | 385 return true; |
| 378 } | 386 } |
| 379 | 387 |
| 380 double WebMediaPlayerMS::mediaTimeForTimeValue(double timeValue) const { | 388 double WebMediaPlayerMS::mediaTimeForTimeValue(double timeValue) const { |
| 381 return base::TimeDelta::FromSecondsD(timeValue).InSecondsF(); | 389 return base::TimeDelta::FromSecondsD(timeValue).InSecondsF(); |
| 382 } | 390 } |
| 383 | 391 |
| 384 unsigned WebMediaPlayerMS::decodedFrameCount() const { | 392 unsigned WebMediaPlayerMS::decodedFrameCount() const { |
| 385 DCHECK(thread_checker_.CalledOnValidThread()); | 393 DCHECK(thread_checker_.CalledOnValidThread()); |
| 386 unsigned total_frame_count = compositor_->GetTotalFrameCount(); | 394 DVLOG(1) << "WebMediaPlayerMS::decodedFrameCount, " << total_frame_count_; |
| 387 DVLOG(1) << "WebMediaPlayerMS::decodedFrameCount, " << total_frame_count; | 395 return total_frame_count_; |
| 388 return total_frame_count; | |
| 389 } | 396 } |
| 390 | 397 |
| 391 unsigned WebMediaPlayerMS::droppedFrameCount() const { | 398 unsigned WebMediaPlayerMS::droppedFrameCount() const { |
| 392 DCHECK(thread_checker_.CalledOnValidThread()); | 399 DCHECK(thread_checker_.CalledOnValidThread()); |
| 393 unsigned dropped_frame_count = compositor_->GetDroppedFrameCount(); | 400 DVLOG(1) << "WebMediaPlayerMS::droppedFrameCount, " << dropped_frame_count_; |
| 394 DVLOG(1) << "WebMediaPlayerMS::droppedFrameCount, " << dropped_frame_count; | 401 return dropped_frame_count_; |
| 395 return dropped_frame_count; | |
| 396 } | 402 } |
| 397 | 403 |
| 398 unsigned WebMediaPlayerMS::audioDecodedByteCount() const { | 404 unsigned WebMediaPlayerMS::audioDecodedByteCount() const { |
| 399 DCHECK(thread_checker_.CalledOnValidThread()); | 405 DCHECK(thread_checker_.CalledOnValidThread()); |
| 400 NOTIMPLEMENTED(); | 406 NOTIMPLEMENTED(); |
| 401 return 0; | 407 return 0; |
| 402 } | 408 } |
| 403 | 409 |
| 404 unsigned WebMediaPlayerMS::videoDecodedByteCount() const { | 410 unsigned WebMediaPlayerMS::videoDecodedByteCount() const { |
| 405 DCHECK(thread_checker_.CalledOnValidThread()); | 411 DCHECK(thread_checker_.CalledOnValidThread()); |
| 406 NOTIMPLEMENTED(); | 412 NOTIMPLEMENTED(); |
| 407 return 0; | 413 return 0; |
| 408 } | 414 } |
| 409 | 415 |
| 410 bool WebMediaPlayerMS::copyVideoTextureToPlatformTexture( | 416 bool WebMediaPlayerMS::copyVideoTextureToPlatformTexture( |
| 411 blink::WebGraphicsContext3D* web_graphics_context, | 417 blink::WebGraphicsContext3D* web_graphics_context, |
| 412 unsigned int texture, | 418 unsigned int texture, |
| 413 unsigned int internal_format, | 419 unsigned int internal_format, |
| 414 unsigned int type, | 420 unsigned int type, |
| 415 bool premultiply_alpha, | 421 bool premultiply_alpha, |
| 416 bool flip_y) { | 422 bool flip_y) { |
| 417 TRACE_EVENT0("media", "WebMediaPlayerMS:copyVideoTextureToPlatformTexture"); | 423 TRACE_EVENT0("media", "WebMediaPlayerMS:copyVideoTextureToPlatformTexture"); |
| 418 DCHECK(thread_checker_.CalledOnValidThread()); | 424 DCHECK(thread_checker_.CalledOnValidThread()); |
| 419 | 425 |
| 420 scoped_refptr<media::VideoFrame> video_frame = compositor_->GetCurrentFrame(); | 426 scoped_refptr<media::VideoFrame> video_frame; |
| 427 { |
| 428 base::AutoLock auto_lock(current_frame_lock_); |
| 429 video_frame = current_frame_; |
| 430 } |
| 421 | 431 |
| 422 if (!video_frame.get() || video_frame->HasTextures() || | 432 if (!video_frame.get() || video_frame->HasTextures() || |
| 423 media::VideoFrame::NumPlanes(video_frame->format()) != 1) { | 433 media::VideoFrame::NumPlanes(video_frame->format()) != 1) { |
| 424 return false; | 434 return false; |
| 425 } | 435 } |
| 426 | 436 |
| 427 // TODO(dshwang): need more elegant way to convert WebGraphicsContext3D to | 437 // TODO(dshwang): need more elegant way to convert WebGraphicsContext3D to |
| 428 // GLES2Interface. | 438 // GLES2Interface. |
| 429 gpu::gles2::GLES2Interface* gl = | 439 gpu::gles2::GLES2Interface* gl = |
| 430 static_cast<gpu_blink::WebGraphicsContext3DImpl*>(web_graphics_context) | 440 static_cast<gpu_blink::WebGraphicsContext3DImpl*>(web_graphics_context) |
| 431 ->GetGLInterface(); | 441 ->GetGLInterface(); |
| 432 media::SkCanvasVideoRenderer::CopyVideoFrameSingleTextureToGLTexture( | 442 media::SkCanvasVideoRenderer::CopyVideoFrameSingleTextureToGLTexture( |
| 433 gl, video_frame.get(), texture, internal_format, type, premultiply_alpha, | 443 gl, video_frame.get(), texture, internal_format, type, premultiply_alpha, |
| 434 flip_y); | 444 flip_y); |
| 435 return true; | 445 return true; |
| 436 } | 446 } |
| 437 | 447 |
| 448 void WebMediaPlayerMS::SetVideoFrameProviderClient( |
| 449 cc::VideoFrameProvider::Client* client) { |
| 450 // This is called from both the main renderer thread and the compositor |
| 451 // thread (when the main thread is blocked). |
| 452 if (video_frame_provider_client_) |
| 453 video_frame_provider_client_->StopUsingProvider(); |
| 454 video_frame_provider_client_ = client; |
| 455 } |
| 456 |
| 457 bool WebMediaPlayerMS::UpdateCurrentFrame(base::TimeTicks deadline_min, |
| 458 base::TimeTicks deadline_max) { |
| 459 // TODO(dalecurtis): This should make use of the deadline interval to ensure |
| 460 // the painted frame is correct for the given interval. |
| 461 NOTREACHED(); |
| 462 return false; |
| 463 } |
| 464 |
| 465 bool WebMediaPlayerMS::HasCurrentFrame() { |
| 466 base::AutoLock auto_lock(current_frame_lock_); |
| 467 return current_frame_; |
| 468 } |
| 469 |
| 470 scoped_refptr<media::VideoFrame> WebMediaPlayerMS::GetCurrentFrame() { |
| 471 DVLOG(3) << "WebMediaPlayerMS::GetCurrentFrame"; |
| 472 base::AutoLock auto_lock(current_frame_lock_); |
| 473 if (!current_frame_.get()) |
| 474 return NULL; |
| 475 current_frame_used_ = true; |
| 476 return current_frame_; |
| 477 } |
| 478 |
| 479 void WebMediaPlayerMS::PutCurrentFrame() { |
| 480 DVLOG(3) << "WebMediaPlayerMS::PutCurrentFrame"; |
| 481 } |
| 482 |
| 438 void WebMediaPlayerMS::OnFrameAvailable( | 483 void WebMediaPlayerMS::OnFrameAvailable( |
| 439 const scoped_refptr<media::VideoFrame>& frame) { | 484 const scoped_refptr<media::VideoFrame>& frame) { |
| 440 DVLOG(3) << "WebMediaPlayerMS::OnFrameAvailable"; | 485 DVLOG(3) << "WebMediaPlayerMS::OnFrameAvailable"; |
| 441 DCHECK(thread_checker_.CalledOnValidThread()); | 486 DCHECK(thread_checker_.CalledOnValidThread()); |
| 442 | 487 ++total_frame_count_; |
| 443 base::TimeTicks render_time; | |
| 444 if (!frame->metadata()->GetTimeTicks( | |
| 445 media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) { | |
| 446 render_time = base::TimeTicks(); | |
| 447 } | |
| 448 TRACE_EVENT1("webrtc", "WebMediaPlayerMS::OnFrameAvailable", | |
| 449 "Ideal Render Instant", render_time.ToInternalValue()); | |
| 450 | |
| 451 if (!received_first_frame_) { | 488 if (!received_first_frame_) { |
| 452 received_first_frame_ = true; | 489 received_first_frame_ = true; |
| 490 { |
| 491 base::AutoLock auto_lock(current_frame_lock_); |
| 492 DCHECK(!current_frame_used_); |
| 493 current_frame_ = frame; |
| 494 } |
| 453 SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); | 495 SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); |
| 454 SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData); | 496 SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData); |
| 497 GetClient()->sizeChanged(); |
| 455 | 498 |
| 456 if (video_frame_provider_.get()) { | 499 if (video_frame_provider_.get()) { |
| 457 video_weblayer_.reset(new cc_blink::WebLayerImpl( | 500 video_weblayer_.reset(new cc_blink::WebLayerImpl( |
| 458 cc::VideoLayer::Create(cc_blink::WebLayerImpl::LayerSettings(), | 501 cc::VideoLayer::Create(cc_blink::WebLayerImpl::LayerSettings(), this, |
| 459 compositor_.get(), media::VIDEO_ROTATION_0))); | 502 media::VIDEO_ROTATION_0))); |
| 460 video_weblayer_->setOpaque(true); | 503 video_weblayer_->setOpaque(true); |
| 461 GetClient()->setWebLayer(video_weblayer_.get()); | 504 GetClient()->setWebLayer(video_weblayer_.get()); |
| 462 } | 505 } |
| 463 } | 506 } |
| 464 | 507 |
| 465 bool size_changed = compositor_->GetCurrentSize() != frame->natural_size(); | 508 // Do not update |current_frame_| when paused. |
| 509 if (paused_) |
| 510 return; |
| 466 | 511 |
| 467 compositor_->EnqueueFrame(frame); | 512 bool size_changed = !current_frame_.get() || |
| 513 current_frame_->natural_size() != frame->natural_size(); |
| 514 |
| 515 { |
| 516 base::AutoLock auto_lock(current_frame_lock_); |
| 517 if (!current_frame_used_ && current_frame_.get()) |
| 518 ++dropped_frame_count_; |
| 519 current_frame_ = frame; |
| 520 current_time_ = frame->timestamp(); |
| 521 current_frame_used_ = false; |
| 522 } |
| 468 | 523 |
| 469 if (size_changed) | 524 if (size_changed) |
| 470 GetClient()->sizeChanged(); | 525 GetClient()->sizeChanged(); |
| 526 |
| 527 GetClient()->repaint(); |
| 471 } | 528 } |
| 472 | 529 |
| 473 void WebMediaPlayerMS::RepaintInternal() { | 530 void WebMediaPlayerMS::RepaintInternal() { |
| 474 DVLOG(1) << "WebMediaPlayerMS::RepaintInternal"; | 531 DVLOG(1) << "WebMediaPlayerMS::RepaintInternal"; |
| 475 DCHECK(thread_checker_.CalledOnValidThread()); | 532 DCHECK(thread_checker_.CalledOnValidThread()); |
| 476 GetClient()->repaint(); | 533 GetClient()->repaint(); |
| 477 } | 534 } |
| 478 | 535 |
| 479 void WebMediaPlayerMS::OnSourceError() { | 536 void WebMediaPlayerMS::OnSourceError() { |
| 480 DVLOG(1) << "WebMediaPlayerMS::OnSourceError"; | 537 DVLOG(1) << "WebMediaPlayerMS::OnSourceError"; |
| (...skipping 15 matching lines...) Expand all Loading... |
| 496 // Always notify to ensure client has the latest value. | 553 // Always notify to ensure client has the latest value. |
| 497 GetClient()->readyStateChanged(); | 554 GetClient()->readyStateChanged(); |
| 498 } | 555 } |
| 499 | 556 |
| 500 blink::WebMediaPlayerClient* WebMediaPlayerMS::GetClient() { | 557 blink::WebMediaPlayerClient* WebMediaPlayerMS::GetClient() { |
| 501 DCHECK(thread_checker_.CalledOnValidThread()); | 558 DCHECK(thread_checker_.CalledOnValidThread()); |
| 502 DCHECK(client_); | 559 DCHECK(client_); |
| 503 return client_; | 560 return client_; |
| 504 } | 561 } |
| 505 | 562 |
| 506 WebMediaPlayerMS::Compositor::Compositor( | |
| 507 const scoped_refptr<base::SingleThreadTaskRunner>& compositor_task_runner) | |
| 508 : compositor_task_runner_(compositor_task_runner), | |
| 509 video_frame_provider_client_(NULL), | |
| 510 current_frame_used_(false), | |
| 511 last_deadline_max_(base::TimeTicks()), | |
| 512 total_frame_count_(0), | |
| 513 dropped_frame_count_(0), | |
| 514 paused_(false) {} | |
| 515 | |
| 516 WebMediaPlayerMS::Compositor::~Compositor() { | |
| 517 DCHECK(compositor_task_runner_->BelongsToCurrentThread()); | |
| 518 if (video_frame_provider_client_) | |
| 519 video_frame_provider_client_->StopUsingProvider(); | |
| 520 } | |
| 521 | |
| 522 void WebMediaPlayerMS::Compositor::EnqueueFrame( | |
| 523 scoped_refptr<media::VideoFrame> const& frame) { | |
| 524 base::AutoLock auto_lock(current_frame_lock_); | |
| 525 ++total_frame_count_; | |
| 526 | |
| 527 if (base::TimeTicks::Now() > last_deadline_max_) { | |
| 528 // TODO(qiangchen): This shows vsync stops rendering frames. A probable | |
| 529 // cause is that the tab is not in the front. But we still have to let | |
| 530 // old frames go. Call VRA::RemoveExpiredFrames. | |
| 531 current_frame_ = frame; | |
| 532 } | |
| 533 | |
| 534 if (staging_frame_.get() != current_frame_.get()) { | |
| 535 // This shows the staging_frame_ never gets updated into current_frame_, and | |
| 536 // now we are going to overwrite it. The frame should be counted as dropped. | |
| 537 ++dropped_frame_count_; | |
| 538 } | |
| 539 | |
| 540 // TODO(qiangchen): Instead of using one variable to hold one frame, use | |
| 541 // VideoRendererAlgorithm. | |
| 542 staging_frame_ = frame; | |
| 543 } | |
| 544 | |
| 545 bool WebMediaPlayerMS::Compositor::UpdateCurrentFrame( | |
| 546 base::TimeTicks deadline_min, | |
| 547 base::TimeTicks deadline_max) { | |
| 548 DCHECK(compositor_task_runner_->BelongsToCurrentThread()); | |
| 549 base::AutoLock auto_lock(current_frame_lock_); | |
| 550 TRACE_EVENT_BEGIN2("webrtc", "WebMediaPlayerMS::UpdateCurrentFrame", | |
| 551 "Actual Render Begin", deadline_min.ToInternalValue(), | |
| 552 "Actual Render End", deadline_max.ToInternalValue()); | |
| 553 last_deadline_max_ = deadline_max; | |
| 554 | |
| 555 // TODO(dalecurtis): This should make use of the deadline interval to ensure | |
| 556 // the painted frame is correct for the given interval. | |
| 557 | |
| 558 if (paused_) | |
| 559 return false; | |
| 560 | |
| 561 if (current_frame_.get() != staging_frame_.get()) { | |
| 562 if (!current_frame_used_) | |
| 563 ++dropped_frame_count_; | |
| 564 current_frame_ = staging_frame_; | |
| 565 current_frame_used_ = false; | |
| 566 } | |
| 567 | |
| 568 base::TimeTicks render_time; | |
| 569 if (!current_frame_->metadata()->GetTimeTicks( | |
| 570 media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) { | |
| 571 render_time = base::TimeTicks(); | |
| 572 } | |
| 573 TRACE_EVENT_END1("webrtc", "WebMediaPlayerMS::UpdateCurrentFrame", | |
| 574 "Ideal Render Instant", render_time.ToInternalValue()); | |
| 575 return !current_frame_used_; | |
| 576 } | |
| 577 | |
| 578 bool WebMediaPlayerMS::Compositor::HasCurrentFrame() { | |
| 579 base::AutoLock auto_lock(current_frame_lock_); | |
| 580 return !!current_frame_.get(); | |
| 581 } | |
| 582 | |
| 583 scoped_refptr<media::VideoFrame> | |
| 584 WebMediaPlayerMS::Compositor::GetCurrentFrame() { | |
| 585 DVLOG(3) << "WebMediaPlayerMS::Compositor::GetCurrentFrame"; | |
| 586 base::AutoLock auto_lock(current_frame_lock_); | |
| 587 if (!current_frame_.get()) | |
| 588 return NULL; | |
| 589 return current_frame_; | |
| 590 } | |
| 591 | |
| 592 void WebMediaPlayerMS::Compositor::PutCurrentFrame() { | |
| 593 DVLOG(3) << "WebMediaPlayerMS::PutCurrentFrame"; | |
| 594 current_frame_used_ = true; | |
| 595 } | |
| 596 | |
| 597 void WebMediaPlayerMS::Compositor::SetVideoFrameProviderClient( | |
| 598 cc::VideoFrameProvider::Client* client) { | |
| 599 DCHECK(compositor_task_runner_->BelongsToCurrentThread()); | |
| 600 if (video_frame_provider_client_) | |
| 601 video_frame_provider_client_->StopUsingProvider(); | |
| 602 | |
| 603 video_frame_provider_client_ = client; | |
| 604 if (video_frame_provider_client_) | |
| 605 video_frame_provider_client_->StartRendering(); | |
| 606 } | |
| 607 | |
| 608 void WebMediaPlayerMS::Compositor::StartRendering() { | |
| 609 DCHECK(compositor_task_runner_->BelongsToCurrentThread()); | |
| 610 paused_ = false; | |
| 611 if (video_frame_provider_client_) | |
| 612 video_frame_provider_client_->StartRendering(); | |
| 613 } | |
| 614 | |
| 615 void WebMediaPlayerMS::Compositor::StopRendering() { | |
| 616 DCHECK(compositor_task_runner_->BelongsToCurrentThread()); | |
| 617 paused_ = true; | |
| 618 if (video_frame_provider_client_) | |
| 619 video_frame_provider_client_->StopRendering(); | |
| 620 | |
| 621 base::AutoLock auto_lock(current_frame_lock_); | |
| 622 if (!current_frame_.get()) | |
| 623 return; | |
| 624 | |
| 625 // Copy the frame so that rendering can show the last received frame. | |
| 626 // The original frame must not be referenced when the player is paused since | |
| 627 // there might be a finite number of available buffers. E.g., video that | |
| 628 // originates from a video camera. | |
| 629 scoped_refptr<media::VideoFrame> new_frame = | |
| 630 CopyFrameToYV12(current_frame_, &video_renderer_); | |
| 631 | |
| 632 current_frame_ = new_frame; | |
| 633 } | |
| 634 | |
| 635 gfx::Size WebMediaPlayerMS::Compositor::GetCurrentSize() { | |
| 636 base::AutoLock auto_lock(current_frame_lock_); | |
| 637 return staging_frame_.get() ? staging_frame_->natural_size() : gfx::Size(); | |
| 638 } | |
| 639 | |
| 640 base::TimeDelta WebMediaPlayerMS::Compositor::GetCurrentTime() { | |
| 641 base::AutoLock auto_lock(current_frame_lock_); | |
| 642 return staging_frame_.get() ? staging_frame_->timestamp() : base::TimeDelta(); | |
| 643 } | |
| 644 | |
| 645 unsigned WebMediaPlayerMS::Compositor::GetTotalFrameCount() { | |
| 646 return total_frame_count_; | |
| 647 } | |
| 648 | |
| 649 unsigned WebMediaPlayerMS::Compositor::GetDroppedFrameCount() { | |
| 650 return dropped_frame_count_; | |
| 651 } | |
| 652 } // namespace content | 563 } // namespace content |
| OLD | NEW |