OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/rtc_video_decoder.h" | 5 #include "content/renderer/media/rtc_video_decoder.h" |
6 | 6 |
7 #include <utility> | 7 #include <utility> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/logging.h" | 10 #include "base/logging.h" |
(...skipping 354 matching lines...)
365 const media::PictureBuffer& pb = it->second; | 365 const media::PictureBuffer& pb = it->second; |
366 if (visible_rect.IsEmpty() || !gfx::Rect(pb.size()).Contains(visible_rect)) { | 366 if (visible_rect.IsEmpty() || !gfx::Rect(pb.size()).Contains(visible_rect)) { |
367 LOG(ERROR) << "Invalid picture size: " << visible_rect.ToString() | 367 LOG(ERROR) << "Invalid picture size: " << visible_rect.ToString() |
368 << " should fit in " << pb.size().ToString(); | 368 << " should fit in " << pb.size().ToString(); |
369 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | 369 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); |
370 return; | 370 return; |
371 } | 371 } |
372 | 372 |
373 scoped_refptr<media::VideoFrame> frame = | 373 scoped_refptr<media::VideoFrame> frame = |
374 CreateVideoFrame(picture, pb, timestamp, visible_rect); | 374 CreateVideoFrame(picture, pb, timestamp, visible_rect); |
| 375 if (!frame) { |
| 376 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); |
| 377 return; |
| 378 } |
375 bool inserted = | 379 bool inserted = |
376 picture_buffers_at_display_.insert(std::make_pair( | 380 picture_buffers_at_display_.insert(std::make_pair( |
377 picture.picture_buffer_id(), | 381 picture.picture_buffer_id(), |
378 pb.texture_id())).second; | 382 pb.texture_id())).second; |
379 DCHECK(inserted); | 383 DCHECK(inserted); |
380 | 384 |
381 // Create a WebRTC video frame. | 385 // Create a WebRTC video frame. |
382 webrtc::VideoFrame decoded_image( | 386 webrtc::VideoFrame decoded_image( |
383 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame), timestamp, 0, | 387 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame), timestamp, 0, |
384 webrtc::kVideoRotation_0); | 388 webrtc::kVideoRotation_0); |
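Reviewer note (not part of the patch): a minimal standalone illustration of the insert().second idiom that the DCHECK(inserted) above relies on, assuming picture_buffers_at_display_ is a std::map-like container keyed by picture buffer id. The names here are illustrative only.

#include <cassert>
#include <cstdint>
#include <map>

int main() {
  std::map<int32_t, uint32_t> buffers_at_display;  // picture_buffer_id -> texture_id.
  bool inserted = buffers_at_display.insert(std::make_pair(1, 42u)).second;
  assert(inserted);   // First insert for key 1 succeeds, .second is true.
  inserted = buffers_at_display.insert(std::make_pair(1, 43u)).second;
  assert(!inserted);  // Duplicate key: insert() is a no-op and .second is false.
  return 0;
}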
(...skipping 15 matching lines...)
400 uint32_t timestamp, | 404 uint32_t timestamp, |
401 const gfx::Rect& visible_rect) { | 405 const gfx::Rect& visible_rect) { |
402 DCHECK(decoder_texture_target_); | 406 DCHECK(decoder_texture_target_); |
403 // Convert timestamp from 90KHz to ms. | 407 // Convert timestamp from 90KHz to ms. |
404 base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue( | 408 base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue( |
405 base::checked_cast<uint64_t>(timestamp) * 1000 / 90); | 409 base::checked_cast<uint64_t>(timestamp) * 1000 / 90); |
406 // TODO(mcasas): The incoming data is actually a YUV format, but is labelled | 410 // TODO(mcasas): The incoming data is actually a YUV format, but is labelled |
407 // as ARGB. This prevents the compositor from messing with it, since the | 411 // as ARGB. This prevents the compositor from messing with it, since the |
408 // underlying platform can handle the former format natively. Make sure the | 412 // underlying platform can handle the former format natively. Make sure the |
409 // correct format is used and everyone down the line understands it. | 413 // correct format is used and everyone down the line understands it. |
410 scoped_refptr<media::VideoFrame> frame(media::VideoFrame::WrapNativeTexture( | 414 scoped_refptr<media::VideoFrame> frame = media::VideoFrame::WrapNativeTexture( |
411 media::PIXEL_FORMAT_ARGB, | 415 media::PIXEL_FORMAT_ARGB, |
412 gpu::MailboxHolder(pb.texture_mailbox(), gpu::SyncToken(), | 416 gpu::MailboxHolder(pb.texture_mailbox(), gpu::SyncToken(), |
413 decoder_texture_target_), | 417 decoder_texture_target_), |
414 media::BindToCurrentLoop(base::Bind( | 418 media::BindToCurrentLoop(base::Bind( |
415 &RTCVideoDecoder::ReleaseMailbox, weak_factory_.GetWeakPtr(), | 419 &RTCVideoDecoder::ReleaseMailbox, weak_factory_.GetWeakPtr(), |
416 factories_, picture.picture_buffer_id(), pb.texture_id())), | 420 factories_, picture.picture_buffer_id(), pb.texture_id())), |
417 pb.size(), visible_rect, visible_rect.size(), timestamp_ms)); | 421 pb.size(), visible_rect, visible_rect.size(), timestamp_ms); |
418 if (picture.allow_overlay()) { | 422 if (frame && picture.allow_overlay()) { |
419 frame->metadata()->SetBoolean(media::VideoFrameMetadata::ALLOW_OVERLAY, | 423 frame->metadata()->SetBoolean(media::VideoFrameMetadata::ALLOW_OVERLAY, |
420 true); | 424 true); |
421 } | 425 } |
422 return frame; | 426 return frame; |
423 } | 427 } |
424 | 428 |
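Reviewer note (not part of the patch): the conversion above works because base::TimeDelta::FromInternalValue() interprets its argument as microseconds (TimeDelta's internal unit), and an RTP video timestamp ticks at 90 kHz, so one tick is 1000/90 microseconds. A standalone sketch of the same arithmetic, with an illustrative helper name:

#include <cstdint>
#include <iostream>

// Mirrors the patch's timestamp * 1000 / 90; widening to uint64_t before the
// multiply avoids 32-bit overflow, as base::checked_cast<uint64_t>() does above.
int64_t RtpTicksToMicroseconds(uint32_t rtp_timestamp) {
  return static_cast<int64_t>(static_cast<uint64_t>(rtp_timestamp) * 1000 / 90);
}

int main() {
  // 90000 ticks == 1 second == 1000000 microseconds.
  std::cout << RtpTicksToMicroseconds(90000) << "\n";
  return 0;
}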
425 void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32_t id) { | 429 void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32_t id) { |
426 DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id; | 430 DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id; |
427 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent(); | 431 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent(); |
428 | 432 |
(...skipping 379 matching lines...)
808 } | 812 } |
809 | 813 |
810 void RTCVideoDecoder::ClearPendingBuffers() { | 814 void RTCVideoDecoder::ClearPendingBuffers() { |
811 // Delete WebRTC input buffers. | 815 // Delete WebRTC input buffers. |
812 for (const auto& pending_buffer : pending_buffers_) | 816 for (const auto& pending_buffer : pending_buffers_) |
813 delete[] pending_buffer.first._buffer; | 817 delete[] pending_buffer.first._buffer; |
814 pending_buffers_.clear(); | 818 pending_buffers_.clear(); |
815 } | 819 } |
816 | 820 |
817 } // namespace content | 821 } // namespace content |