Index: content/common/gpu/media/gpu_video_encode_accelerator.cc |
diff --git a/content/common/gpu/media/gpu_video_encode_accelerator.cc b/content/common/gpu/media/gpu_video_encode_accelerator.cc |
index a61f1fe4d2e93b1997a3c4f3adf45e9b8e1e8c58..5f1f069392d3932e7e5e5c4534b1281ccc535958 100644 |
--- a/content/common/gpu/media/gpu_video_encode_accelerator.cc |
+++ b/content/common/gpu/media/gpu_video_encode_accelerator.cc |
@@ -112,7 +112,10 @@ void GpuVideoEncodeAccelerator::Initialize( |
bool GpuVideoEncodeAccelerator::OnMessageReceived(const IPC::Message& message) { |
bool handled = true; |
IPC_BEGIN_MESSAGE_MAP(GpuVideoEncodeAccelerator, message) |
- IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderMsg_Encode, OnEncode) |
+ IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderMsg_EncodeSharedMemory, |
+ OnEncodeSharedMemory) |
+ IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderMsg_EncodeNativeTexture, |
+ OnEncodeNativeTexture) |
IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderMsg_UseOutputBitstreamBuffer, |
OnUseOutputBitstreamBuffer) |
IPC_MESSAGE_HANDLER( |
@@ -207,11 +210,12 @@ GpuVideoEncodeAccelerator::CreateAndroidVEA() { |
return encoder.Pass(); |
}

-void GpuVideoEncodeAccelerator::OnEncode(int32 frame_id,
- base::SharedMemoryHandle buffer_handle, |
- uint32 buffer_offset, |
- uint32 buffer_size, |
- bool force_keyframe) { |
+void GpuVideoEncodeAccelerator::OnEncodeSharedMemory( |
+ int32 frame_id, |
+ base::SharedMemoryHandle buffer_handle, |
+ uint32 buffer_offset, |
+ uint32 buffer_size, |
+ bool force_keyframe) { |
-  DVLOG(3) << "GpuVideoEncodeAccelerator::OnEncode(): frame_id=" << frame_id
-           << ", buffer_size=" << buffer_size
-           << ", force_keyframe=" << force_keyframe;
+  DVLOG(3) << "GpuVideoEncodeAccelerator::OnEncodeSharedMemory(): frame_id="
+           << frame_id << ", buffer_size=" << buffer_size
+           << ", force_keyframe=" << force_keyframe;
@@ -276,6 +280,45 @@ void GpuVideoEncodeAccelerator::OnEncode(int32 frame_id,
encoder_->Encode(frame, force_keyframe); |
}

+void GpuVideoEncodeAccelerator::OnEncodeNativeTexture(
+ int32 frame_id, |
+ gpu::MailboxHolder mailbox_holder, |
+ bool force_keyframe) { |
+ DVLOG(3) << "GpuVideoEncodeAccelerator::OnEncodeNativeTexture(): frame_id=" |
+ << frame_id << ", force_keyframe=" << force_keyframe; |
+ if (!encoder_) |
+ return; |
+ if (frame_id < 0) { |
+ DLOG(ERROR) << "GpuVideoEncodeAccelerator::OnEncodeNativeTexture(): " |
+ "invalid frame_id=" << frame_id; |
+ NotifyError(media::VideoEncodeAccelerator::kPlatformFailureError); |
+ return; |
+ } |
+ DCHECK(!mailbox_holder.mailbox.IsZero()); |
+ DCHECK(mailbox_holder.mailbox.Verify()); |
+ DCHECK(mailbox_holder.texture_target); |
+ DCHECK(mailbox_holder.sync_point); |
+ |
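+  // Wrap the texture mailbox in a VideoFrame; the destruction observer added
+  // below notifies the host once the encoder has released the frame.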
+  scoped_refptr<media::VideoFrame> frame = media::VideoFrame::WrapNativeTexture(
+      mailbox_holder,
+      media::VideoFrame::ReleaseMailboxCB(),
+      input_coded_size_, gfx::Rect(input_visible_size_), input_visible_size_,
+      base::TimeDelta(), true /* allow_overlay */, true /* has_alpha */);
+  if (!frame.get()) {
+    DLOG(ERROR) << "GpuVideoEncodeAccelerator::OnEncodeNativeTexture(): "
+                   "could not create VideoFrame for frame_id=" << frame_id;
+    NotifyError(media::VideoEncodeAccelerator::kPlatformFailureError);
+    return;
+  }
+  frame->AddDestructionObserver(media::BindToCurrentLoop(
+      base::Bind(&GpuVideoEncodeAccelerator::EncodeNativeTextureFrameFinished,
+                 weak_this_factory_.GetWeakPtr(), frame_id)));
+ |
+ encoder_->Encode(frame, force_keyframe); |
+} |
+ |
void GpuVideoEncodeAccelerator::OnUseOutputBitstreamBuffer( |
int32 buffer_id, |
base::SharedMemoryHandle buffer_handle, |
@@ -324,6 +367,14 @@ void GpuVideoEncodeAccelerator::EncodeFrameFinished(
frame_id)); |
// Just let shm fall out of scope. |
}

+void GpuVideoEncodeAccelerator::EncodeNativeTextureFrameFinished(
+ int32 frame_id) { |
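+  // Called when the VideoFrame wrapping the texture is destroyed; let the
+  // host know the encoder is done with the input frame.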
+ Send(new AcceleratedVideoEncoderHostMsg_NotifyInputDone(host_route_id_, |
+ frame_id)); |
+}
+
void GpuVideoEncodeAccelerator::Send(IPC::Message* message) { |
stub_->channel()->Send(message); |