Chromium Code Reviews| Index: content/renderer/pepper/video_decoder_proxy.cc |
| diff --git a/content/renderer/pepper/video_decoder_proxy.cc b/content/renderer/pepper/video_decoder_proxy.cc |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..181a609c2064bd49bf9f4ecacc37ae68f6d7a148 |
| --- /dev/null |
| +++ b/content/renderer/pepper/video_decoder_proxy.cc |
| @@ -0,0 +1,445 @@ |
| +// Copyright (c) 2014 The Chromium Authors. All rights reserved. |
| +// Use of this source code is governed by a BSD-style license that can be |
| +// found in the LICENSE file. |
| + |
| +#include <GLES2/gl2.h> |
| +#include <GLES2/gl2ext.h> |
| +#include <GLES2/gl2extchromium.h> |
| + |
| +#include "content/renderer/pepper/video_decoder_proxy.h" |
| + |
| +#include "base/bind.h" |
| +#include "content/public/renderer/render_thread.h" |
| +#include "content/renderer/pepper/pepper_video_decoder_host.h" |
| +#include "content/renderer/render_thread_impl.h" |
| +#include "gpu/command_buffer/client/gles2_implementation.h" |
| +#include "media/base/decoder_buffer.h" |
| +#include "media/filters/ffmpeg_video_decoder.h" |
| +#include "media/video/picture.h" |
| +#include "media/video/video_decode_accelerator.h" |
| +#include "ppapi/c/pp_errors.h" |
| +#include "third_party/libyuv/include/libyuv.h" |
| +#include "webkit/common/gpu/context_provider_web_context.h" |
| + |
| +namespace content { |
| + |
// Records one outstanding Decode() request: the caller-supplied id used to
// correlate the eventual result with the host's shm buffer, plus the
// compressed input data.
VideoDecoderProxy::PendingDecode::PendingDecode(
    uint32_t decode_id,
    const scoped_refptr<media::DecoderBuffer>& buffer)
    : decode_id(decode_id), buffer(buffer) {
}

VideoDecoderProxy::PendingDecode::~PendingDecode() {
}
| + |
// Holds a decoded frame after conversion to ARGB. |pixels| is sized at
// 4 bytes per pixel (width * height * 4); an empty |size| yields an empty
// pixel buffer, which downstream code treats as "no picture".
VideoDecoderProxy::PendingFrame::PendingFrame(uint32_t decode_id,
                                              const gfx::Size& size)
    : decode_id(decode_id),
      size(size),
      pixels(size.width() * size.height() * 4) {
}

VideoDecoderProxy::PendingFrame::~PendingFrame() {
}
| + |
// The Delegate runs on the media thread. |proxy_| is a WeakPtr that may only
// be dereferenced on the main thread, which is captured here as
// |main_message_loop_| (the thread constructing the Delegate).
VideoDecoderProxy::Delegate::Delegate(
    const base::WeakPtr<VideoDecoderProxy>& proxy)
    : proxy_(proxy), main_message_loop_(base::MessageLoopProxy::current()) {
}

VideoDecoderProxy::Delegate::~Delegate() {
  // NOTE(review): nothing in this class visibly guarantees the queue has
  // drained by destruction time — this appears to rely on Destroy() having
  // stopped the decoder (running any remaining callbacks) first. Confirm.
  DCHECK(pending_decodes_.empty());
}
| + |
| +void VideoDecoderProxy::Delegate::Initialize( |
| + scoped_ptr<media::VideoDecoder> decoder, |
| + media::VideoDecoderConfig config) { |
| + DCHECK(!decoder_); |
| + decoder_.reset(decoder.release()); |
|
dmichael (off chromium)
2014/06/05 23:00:43
tiny nit: I think I would write this as decoder_ =
bbudge
2014/06/06 02:03:45
This is no longer an argument.
|
| + decoder_->Initialize( |
| + config, |
| + true /* low_delay */, |
| + base::Bind(&VideoDecoderProxy::Delegate::OnPipelineStatus, |
| + base::Unretained(this))); |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
I guess the only reason this isn't a UAF waiting t
bbudge
2014/06/06 02:03:45
Done.
|
| +} |
| + |
// Trampolines the decoder's initialization result from the media thread to
// the proxy on the main thread. |proxy_| is a WeakPtr, so the posted task
// becomes a no-op if the proxy has already been destroyed.
void VideoDecoderProxy::Delegate::OnPipelineStatus(
    media::PipelineStatus status) {
  main_message_loop_->PostTask(
      FROM_HERE,
      base::Bind(&VideoDecoderProxy::OnPipelineStatus, proxy_, status));
}
| + |
| +void VideoDecoderProxy::Delegate::ReceiveBuffer( |
| + uint32_t decode_id, |
| + scoped_refptr<media::DecoderBuffer> buffer) { |
| + bool decoder_busy = !pending_decodes_.empty(); |
| + pending_decodes_.push(PendingDecode(decode_id, buffer)); |
| + if (!decoder_busy) |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
FWIW can drop decoder_busy if you do
if (pending_d
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
AFAICT a client that never calls Reset() can never
bbudge
2014/06/06 02:03:44
I changed this and now call VD::GetMaxDecodeReques
bbudge
2014/06/06 02:03:45
It happens at startup and on reset. In the steady
|
| + Decode(); |
| +} |
| + |
| +void VideoDecoderProxy::Delegate::Decode() { |
| + DCHECK(!pending_decodes_.empty()); |
| + PendingDecode& next_decode = pending_decodes_.front(); |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
const& is less surprising
bbudge
2014/06/06 02:03:45
Done.
|
| + decoder_->Decode(next_decode.buffer, |
| + base::Bind(&VideoDecoderProxy::Delegate::ConvertFrame, |
| + base::Unretained(this), |
| + next_decode.decode_id)); |
| + pending_decodes_.pop(); |
| +} |
| + |
| +void VideoDecoderProxy::Delegate::ConvertFrame( |
| + uint32_t decode_id, |
| + media::VideoDecoder::Status status, |
| + const scoped_refptr<media::VideoFrame>& frame) { |
| + scoped_ptr<PendingFrame> pending_frame( |
| + new PendingFrame(decode_id, gfx::Size())); |
| + if (frame) { |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
You're using frame!=NULL as a proxy for status==kO
bbudge
2014/06/06 02:03:45
So:
if (status == kOk && frame && !frame->end_of_s
|
| + pending_frame->size = frame->coded_size(); |
| + pending_frame->pixels.resize(frame->coded_size().width() * |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
It's strange that you do the w*h*4 calc here _and_
bbudge
2014/06/06 02:03:45
Added a ctor to create an empty frame, and defer c
|
| + frame->coded_size().height() * 4); |
| + // Convert the decoded frame to ARGB pixels. |
| + libyuv::I420ToARGB(frame->data(media::VideoFrame::kYPlane), |
| + frame->stride(media::VideoFrame::kYPlane), |
| + frame->data(media::VideoFrame::kUPlane), |
| + frame->stride(media::VideoFrame::kUPlane), |
| + frame->data(media::VideoFrame::kVPlane), |
| + frame->stride(media::VideoFrame::kVPlane), |
| + &pending_frame->pixels.front(), |
| + frame->coded_size().width() * 4, |
| + frame->coded_size().width(), |
| + frame->coded_size().height()); |
| + } |
| + |
| + main_message_loop_->PostTask(FROM_HERE, |
| + base::Bind(&VideoDecoderProxy::ReceiveFrame, |
| + proxy_, |
| + status, |
| + base::Passed(&pending_frame))); |
| + |
| + if (!pending_decodes_.empty()) |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
per comment at l.78, can this arise in the absence
bbudge
2014/06/06 02:03:45
Stop() and Reset() now clear pending frames first,
|
| + Decode(); |
| +} |
| + |
| +void VideoDecoderProxy::Delegate::Reset() { |
| + decoder_->Reset(base::Bind(&VideoDecoderProxy::Delegate::OnResetComplete, |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
set some state so that decodes that arrive from no
bbudge
2014/06/06 02:03:44
No decodes can arrive until Reset completes becaus
|
| + base::Unretained(this))); |
| +} |
| + |
| +void VideoDecoderProxy::Delegate::OnResetComplete() { |
| + // Cancel all remaining decodes, and notify the host so it can free the shm |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
is it obvious why this shouldn't have been done in
bbudge
2014/06/06 02:03:44
No, it should be done in Reset() as you point out.
|
| + // buffers. We'll clear pending frames on the main thread. |
| + while (!pending_decodes_.empty()) { |
| + PendingDecode& next_decode = pending_decodes_.front(); |
| + scoped_ptr<PendingFrame> pending_frame( |
| + new PendingFrame(next_decode.decode_id, gfx::Size())); |
| + main_message_loop_->PostTask(FROM_HERE, |
| + base::Bind(&VideoDecoderProxy::ReceiveFrame, |
| + proxy_, |
| + media::VideoDecoder::kAborted, |
| + base::Passed(&pending_frame))); |
|
dmichael (off chromium)
2014/06/05 23:00:43
Any reason you can't pass a null pointer instead o
bbudge
2014/06/06 02:03:44
The frame has the decode_id, which is used to iden
|
| + pending_decodes_.pop(); |
| + } |
| + main_message_loop_->PostTask( |
| + FROM_HERE, base::Bind(&VideoDecoderProxy::OnResetComplete, proxy_)); |
| +} |
| + |
// Final call on the media thread before this Delegate is deleted (it is
// posted with base::Owned by VideoDecoderProxy::Destroy()). Stop() may run
// outstanding decode callbacks; any tasks they post to |proxy_| are no-ops
// because the weak pointer has already been invalidated.
void VideoDecoderProxy::Delegate::Destroy() {
  DCHECK(decoder_);
  decoder_->Stop();
  // By now, our owning VideoDecoderProxy has invalidated our weak_ptr to it.
}
| + |
// Constructed on the main (render) thread. Captures the media thread's
// message loop for posting decode work and the shared GL context provider
// for uploading converted frames into textures.
VideoDecoderProxy::VideoDecoderProxy(PepperVideoDecoderHost* host)
    : state_(UNINITIALIZED),
      host_(host),
      media_message_loop_(
          RenderThreadImpl::current()->GetMediaThreadMessageLoopProxy()),
      context_provider_(
          RenderThreadImpl::current()->SharedMainThreadContextProvider()),
      num_pending_decodes_(0),
      weak_ptr_factory_(this) {
  DCHECK(host_);
  DCHECK(media_message_loop_);
  DCHECK(context_provider_);
  // The delegate lives on the media thread and reaches back to us only
  // through this WeakPtr, which Destroy() invalidates before deleting us.
  delegate_.reset(new Delegate(weak_ptr_factory_.GetWeakPtr()));
}
| + |
// Destroyed on the main thread (reached via Destroy()'s `delete this`).
// Frees any frames that were never delivered and all GL textures, then
// flushes so the deletes reach the GPU process.
VideoDecoderProxy::~VideoDecoderProxy() {
  DCHECK(RenderThreadImpl::current());
  DCHECK(!host_);  // Destroy() must have cleared the host pointer first.
  // Delete any remaining video frames (the queue holds raw owning pointers).
  while (!pending_frames_.empty()) {
    delete pending_frames_.front();
    pending_frames_.pop();
  }
  // Delete any remaining textures. |it->second| is the renderer-local
  // texture id; the map itself dies with this object.
  TextureIdMap::iterator it = texture_id_map_.begin();
  for (; it != texture_id_map_.end(); ++it)
    DeleteTexture(it->second);

  FlushCommandBuffer();
}
| + |
// Builds a minimal VideoDecoderConfig for |profile| and hands a new
// software decoder to the delegate on the media thread. The real result is
// delivered asynchronously via OnPipelineStatus().
void VideoDecoderProxy::Initialize(media::VideoCodecProfile profile) {
  DCHECK(RenderThreadImpl::current());
  DCHECK_EQ(state_, UNINITIALIZED);
  // Map the profile range to a codec; profiles are ordered H264 < VP8.
  media::VideoCodec codec = media::kUnknownVideoCodec;
  if (profile <= media::H264PROFILE_MAX)
    codec = media::kCodecH264;
  else if (profile <= media::VP8PROFILE_MAX)
    codec = media::kCodecVP8;
  DCHECK_NE(codec, media::kUnknownVideoCodec);

  media::VideoDecoderConfig config(
      codec,
      profile,
      media::VideoFrame::YV12,
      gfx::Size(32, 24),  // Small sizes that won't fail.
      gfx::Rect(32, 24),
      gfx::Size(32, 24),
      NULL /* extra_data */,
      0 /* extra_data_size */,
      false /* decryption */);

  // NOTE(review): FFmpeg is hard-coded here; a VPx decoder may be the
  // better choice for VP8/VP9 content — TODO confirm against the media
  // team's guidance.
  scoped_ptr<media::VideoDecoder> decoder(
      new media::FFmpegVideoDecoder(media_message_loop_));

  media_message_loop_->PostTask(
      FROM_HERE,
      base::Bind(&VideoDecoderProxy::Delegate::Initialize,
                 base::Unretained(delegate_.get()),
                 base::Passed(&decoder),
                 config));
  // NOTE(review): this transitions to DECODING before the asynchronous
  // initialization result is known; failures are only reported later via
  // OnPipelineStatus(). TODO: move the transition to the success path.
  state_ = DECODING;
}
| + |
| +void VideoDecoderProxy::Decode(uint32_t decode_id, |
| + const uint8_t* buffer, |
| + uint32_t size) { |
| + DCHECK(RenderThreadImpl::current()); |
| + DCHECK_EQ(state_, DECODING); |
| + |
| + num_pending_decodes_++; |
| + |
| + media_message_loop_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&VideoDecoderProxy::Delegate::ReceiveBuffer, |
| + base::Unretained(delegate_.get()), |
| + decode_id, |
| + media::DecoderBuffer::CopyFrom(buffer, size))); |
| +} |
| + |
| +void VideoDecoderProxy::AssignTextures( |
| + const std::vector<uint32_t>& texture_ids) { |
| + DCHECK(RenderThreadImpl::current()); |
| + DCHECK_EQ(state_, DECODING); |
| + DCHECK(texture_ids.size()); |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
DCHECK(texture_id_map_.empty());
?
dmichael (off chromium)
2014/06/05 23:00:44
With a "!", of course :)
bbudge
2014/06/06 02:03:45
I don't think so. We AssignTextures when the video
|
| + DCHECK_EQ(texture_ids.size(), pending_texture_mailboxes_.size()); |
| + uint32_t num_textures = static_cast<GLuint>(texture_ids.size()); |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
checked_cast to avoid silent overflow
bbudge
2014/06/06 02:03:45
Done.
|
| + std::vector<uint32_t> local_texture_ids(num_textures); |
| + gpu::gles2::GLES2Interface* gles2 = context_provider_->ContextGL(); |
| + gles2->GenTextures(num_textures, &local_texture_ids.front()); |
| + for (uint32_t i = 0; i < num_textures; i++) { |
| + gles2->ActiveTexture(GL_TEXTURE0); |
| + gles2->BindTexture(GL_TEXTURE_2D, local_texture_ids[i]); |
| + gles2->ConsumeTextureCHROMIUM(GL_TEXTURE_2D, |
| + pending_texture_mailboxes_[i].name); |
| + // Map the plugin texture id to the local texture id. |
| + texture_id_map_.insert( |
| + std::make_pair(texture_ids[i], local_texture_ids[i])); |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
since you're ignoring return value you may as well
bbudge
2014/06/06 02:03:44
Done.
|
| + } |
| + pending_texture_mailboxes_.clear(); |
| + available_textures_.insert( |
| + available_textures_.end(), texture_ids.begin(), texture_ids.end()); |
| + SendPictures(); |
| +} |
| + |
| +void VideoDecoderProxy::RecycleTexture(uint32_t texture_id) { |
| + DCHECK(RenderThreadImpl::current()); |
| + if (textures_to_dismiss_.find(texture_id) != textures_to_dismiss_.end()) { |
| + DismissTexture(texture_id); |
| + } else if (texture_id_map_.find(texture_id) != texture_id_map_.end()) { |
| + available_textures_.push_back(texture_id); |
| + SendPictures(); |
| + } else { |
| + NOTREACHED(); |
| + } |
| +} |
| + |
// Enters the FLUSHING state. Completion is reported from SendPictures()
// once all pending decodes have resolved and all frames were delivered.
void VideoDecoderProxy::Flush() {
  DCHECK(RenderThreadImpl::current());
  DCHECK_EQ(state_, DECODING);
  state_ = FLUSHING;
}
| + |
// Asks the delegate (on the media thread) to reset the decoder. Completion
// comes back through OnResetComplete(), which restores the DECODING state.
void VideoDecoderProxy::Reset() {
  DCHECK(RenderThreadImpl::current());
  DCHECK_EQ(state_, DECODING);
  state_ = RESETTING;
  media_message_loop_->PostTask(FROM_HERE,
                                base::Bind(&VideoDecoderProxy::Delegate::Reset,
                                           base::Unretained(delegate_.get())));
}
| + |
// Tears everything down. Order matters: invalidate weak pointers so tasks
// the delegate already posted back to us become no-ops, hand ownership of
// the delegate to the media thread (base::Owned deletes it there after
// Destroy() runs), then delete ourselves on this (main) thread.
void VideoDecoderProxy::Destroy() {
  DCHECK(RenderThreadImpl::current());
  DCHECK(host_);
  host_ = NULL;
  // Cut the delegate loose.
  weak_ptr_factory_.InvalidateWeakPtrs();
  media_message_loop_->PostTask(
      FROM_HERE,
      base::Bind(&VideoDecoderProxy::Delegate::Destroy,
                 base::Owned(delegate_.release())));

  delete this;
}
| + |
| +void VideoDecoderProxy::OnPipelineStatus(media::PipelineStatus status) { |
| + DCHECK(RenderThreadImpl::current()); |
| + DCHECK(host_); |
| + |
| + int32_t result; |
| + switch (status) { |
| + case media::PIPELINE_OK: |
| + result = PP_OK; |
| + break; |
| + case media::DECODER_ERROR_NOT_SUPPORTED: |
| + result = PP_ERROR_NOTSUPPORTED; |
| + break; |
| + default: |
| + result = PP_ERROR_FAILED; |
| + break; |
| + } |
| + host_->OnInitializeComplete(result); |
| +} |
| + |
// Receives a converted frame (or an empty "no picture" placeholder) from
// the delegate, on the main thread. Handles texture-size changes by
// retiring the old texture set, queues the frame for delivery, and maps
// decoder errors to a host notification.
void VideoDecoderProxy::ReceiveFrame(media::VideoDecoder::Status status,
                                     scoped_ptr<PendingFrame> frame) {
  DCHECK(RenderThreadImpl::current());
  DCHECK(host_);

  num_pending_decodes_--;

  // Empty |pixels| means no displayable picture (error/abort/EOS).
  if (frame->pixels.size()) {
    if (texture_size_ != frame->size) {
      // If the size has changed, all current textures must be dismissed. Add
      // all textures to |textures_to_dismiss_| and dismiss any that aren't in
      // use by the plugin. We dismiss the rest as they are recycled.
      for (TextureIdMap::const_iterator it = texture_id_map_.begin();
           it != texture_id_map_.end();
           ++it) {
        textures_to_dismiss_.insert(it->second);
      }
      for (std::vector<uint32_t>::const_iterator it =
               available_textures_.begin();
           it != available_textures_.end();
           ++it) {
        DismissTexture(*it);
      }
      available_textures_.clear();
      FlushCommandBuffer();

      DCHECK(pending_texture_mailboxes_.empty());
      // NOTE(review): 8 is an unvalidated guess at a good pool size — the
      // review thread left tuning it as an open question.
      const uint32_t num_textures = 8;
      for (uint32_t i = 0; i < num_textures; i++)
        pending_texture_mailboxes_.push_back(gpu::Mailbox::Generate());

      host_->RequestTextures(
          num_textures, frame->size, GL_TEXTURE_2D, pending_texture_mailboxes_);
      texture_size_ = frame->size;
    }

    // |pending_frames_| takes ownership of the raw pointer.
    pending_frames_.push(frame.release());
    SendPictures();
  } else {
    // No picture: release the input buffer immediately. NOTE(review): the
    // reviewer questioned doing this on the error path too; the author
    // kept it to mirror VideoDecodeAccelerator's buffer-return contract.
    host_->NotifyEndOfBitstreamBuffer(frame->decode_id);
  }

  switch (status) {
    case media::VideoDecoder::kOk:
    case media::VideoDecoder::kAborted:
    // This is not necessarily an error.
    case media::VideoDecoder::kNotEnoughData:
      break;
    case media::VideoDecoder::kDecodeError:
    // NOTE(review): kDecryptError should be unreachable here (no encrypted
    // streams are configured), but the case is kept for enum exhaustiveness.
    case media::VideoDecoder::kDecryptError:
      host_->NotifyError(PP_ERROR_RESOURCE_FAILED);
      break;
      // No default case, to catch unhandled status values.
  }
}
| + |
// Delivers queued frames to the plugin while both a frame and a free
// texture are available: uploads the ARGB pixels into the local texture,
// releases the input buffer, and hands the picture to the host. Also the
// single place where a pending Flush() is completed.
void VideoDecoderProxy::SendPictures() {
  DCHECK(RenderThreadImpl::current());
  DCHECK(host_);

  while (!pending_frames_.empty() && !available_textures_.empty()) {
    // Take ownership of the raw pointer from the queue; deleted at loop end.
    scoped_ptr<PendingFrame> frame(pending_frames_.front());
    pending_frames_.pop();

    uint32_t texture_id = available_textures_.back();
    available_textures_.pop_back();

    // Translate the plugin's texture id to the renderer-local GL id.
    uint32_t local_texture_id = texture_id_map_[texture_id];
    gpu::gles2::GLES2Interface* gles2 = context_provider_->ContextGL();
    gles2->ActiveTexture(GL_TEXTURE0);
    gles2->BindTexture(GL_TEXTURE_2D, local_texture_id);
    gles2->TexImage2D(GL_TEXTURE_2D,
                      0,
                      GL_RGBA,
                      texture_size_.width(),
                      texture_size_.height(),
                      0,
                      GL_RGBA,
                      GL_UNSIGNED_BYTE,
                      &frame->pixels.front());

    host_->NotifyEndOfBitstreamBuffer(frame->decode_id);
    host_->PictureReady(media::Picture(texture_id, frame->decode_id));
  }

  FlushCommandBuffer();

  // A flush is complete only when nothing is in flight anywhere.
  if (state_ == FLUSHING && !num_pending_decodes_ && pending_frames_.empty()) {
    state_ = DECODING;
    host_->NotifyFlushDone();
  }
}
| + |
| +void VideoDecoderProxy::OnResetComplete() { |
| + DCHECK(RenderThreadImpl::current()); |
| + DCHECK(host_); |
| + |
| + while (!pending_frames_.empty()) { |
| + scoped_ptr<PendingFrame> frame(pending_frames_.front()); |
| + host_->NotifyEndOfBitstreamBuffer(frame->decode_id); |
|
Ami GONE FROM CHROMIUM
2014/06/05 00:06:24
Not sure this is necessary, FWIW.
bbudge
2014/06/06 02:03:44
It makes it a little simpler in the host, since I
|
| + pending_frames_.pop(); |
| + } |
| + |
| + state_ = DECODING; |
| + host_->NotifyResetDone(); |
| +} |
| + |
| +void VideoDecoderProxy::DismissTexture(uint32_t texture_id) { |
| + DCHECK(host_); |
| + textures_to_dismiss_.erase(texture_id); |
| + DCHECK(texture_id_map_.find(texture_id) != texture_id_map_.end()); |
| + DeleteTexture(texture_id_map_[texture_id]); |
| + texture_id_map_.erase(texture_id); |
| + host_->DismissPictureBuffer(texture_id); |
| +} |
| + |
// Deletes one renderer-local GL texture. The delete is only queued in the
// command buffer; callers flush (FlushCommandBuffer) when appropriate.
void VideoDecoderProxy::DeleteTexture(uint32_t texture_id) {
  gpu::gles2::GLES2Interface* gles2 = context_provider_->ContextGL();
  gles2->DeleteTextures(1, &texture_id);
}

// Flushes queued GL commands so texture operations reach the GPU process.
void VideoDecoderProxy::FlushCommandBuffer() {
  DCHECK(RenderThreadImpl::current());
  context_provider_->ContextGL()->Flush();
}
| + |
| +} // namespace content |