Index: content/browser/renderer_host/media/video_capture_device_client.cc
diff --git a/content/browser/renderer_host/media/video_capture_device_client.cc b/content/browser/renderer_host/media/video_capture_device_client.cc
index 66b7fdb3d5d8064233e13585558b4a3894daac4e..55bb8cc5431b467eb0426b1bc23b29e31a063dfa 100644
--- a/content/browser/renderer_host/media/video_capture_device_client.cc
+++ b/content/browser/renderer_host/media/video_capture_device_client.cc
@@ -7,12 +7,23 @@
 #include "base/bind.h"
 #include "base/strings/stringprintf.h"
 #include "base/trace_event/trace_event.h"
+#include "content/browser/compositor/image_transport_factory.h"
+#include "content/browser/gpu/browser_gpu_channel_host_factory.h"
+#include "content/browser/gpu/browser_gpu_memory_buffer_manager.h"
+#include "content/browser/gpu/gpu_data_manager_impl.h"
 #include "content/browser/renderer_host/media/video_capture_buffer_pool.h"
 #include "content/browser/renderer_host/media/video_capture_controller.h"
+#include "content/common/gpu/client/context_provider_command_buffer.h"
+#include "content/common/gpu/client/gl_helper.h"
+#include "content/common/gpu/client/gpu_channel_host.h"
+#include "content/common/gpu/client/webgraphicscontext3d_command_buffer_impl.h"
+#include "content/common/gpu/gpu_process_launch_causes.h"
 #include "content/public/browser/browser_thread.h"
+#include "gpu/command_buffer/common/mailbox_holder.h"
 #include "media/base/bind_to_current_loop.h"
 #include "media/base/video_capture_types.h"
 #include "media/base/video_frame.h"
+#include "third_party/khronos/GLES2/gl2ext.h"
 #include "third_party/libyuv/include/libyuv.h"

 using media::VideoCaptureFormat;
@@ -20,39 +31,181 @@ using media::VideoFrame;

 namespace content {

+namespace {
+
+#if !defined(OS_ANDROID)
+// Modelled after GpuProcessTransportFactory::CreateContextCommon().
+scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl> CreateContextCommon(
+    scoped_refptr<content::GpuChannelHost> gpu_channel_host,
+    int surface_id) {
+  if (!content::GpuDataManagerImpl::GetInstance()->
+          CanUseGpuBrowserCompositor()) {
+    DLOG(ERROR) << "No accelerated graphics found. Check chrome://gpu";
+    return scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl>();
+  }
+  blink::WebGraphicsContext3D::Attributes attrs;
+  attrs.shareResources = true;
+  attrs.depth = false;
+  attrs.stencil = false;
+  attrs.antialias = false;
+  attrs.noAutomaticFlushes = true;
+
+  if (!gpu_channel_host.get()) {
+    DLOG(ERROR) << "Failed to establish GPU channel.";
+    return scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl>();
+  }
+  GURL url("chrome://gpu/GpuProcessTransportFactory::CreateCaptureContext");
+  return make_scoped_ptr(
+      new WebGraphicsContext3DCommandBufferImpl(
+          surface_id,
+          url,
+          gpu_channel_host.get(),
+          attrs,
+          true /* lose_context_when_out_of_memory */,
+          content::WebGraphicsContext3DCommandBufferImpl::SharedMemoryLimits(),
+          NULL));
+}
+
+// Modelled after
+// GpuProcessTransportFactory::CreateOffscreenCommandBufferContext().
+scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl>
+CreateOffscreenCommandBufferContext() {
+  content::CauseForGpuLaunch cause = content::CAUSE_FOR_GPU_LAUNCH_CANVAS_2D;
+  // Android does not support synchronous opening of GPU channels, so this
+  // code would have to use the asynchronous EstablishGpuChannel() instead.
+  scoped_refptr<content::GpuChannelHost> gpu_channel_host(
+      content::BrowserGpuChannelHostFactory::instance()->
+          EstablishGpuChannelSync(cause));
+  DCHECK(gpu_channel_host);
+  return CreateContextCommon(gpu_channel_host, 0);
+}
+#endif
+
+typedef base::Callback<void(scoped_refptr<ContextProviderCommandBuffer>)>
+    ProcessContextCallback;
+
+void CreateContextOnUIThread(ProcessContextCallback bottom_half) {
+  DCHECK_CURRENTLY_ON(BrowserThread::UI);
+#if !defined(OS_ANDROID)
+  bottom_half.Run(ContextProviderCommandBuffer::Create(
+      CreateOffscreenCommandBufferContext(), "Offscreen-CaptureThread"));
+  return;
+#endif
+}
+
+void ResetLostContextCallback(
+    const scoped_refptr<ContextProviderCommandBuffer>& capture_thread_context) {
+  capture_thread_context->SetLostContextCallback(
+      cc::ContextProvider::LostContextCallback());
+}
+
+}  // anonymous namespace
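
Reviewer note: the two-step setup above (create the context on the UI thread, consume it elsewhere) relies on media::BindToCurrentLoop() to trampoline the result back to the requesting thread. A minimal sketch of the calling pattern, not part of this patch and with illustrative names:

// Sketch only: request a context from a non-UI thread and receive the
// result back on that same thread.
void OnContextCreated(
    scoped_refptr<ContextProviderCommandBuffer> context_provider) {
  // Runs on the requesting thread; |context_provider| must still be bound
  // via BindToCurrentThread() before its GL interface may be used.
}

void RequestContext() {
  BrowserThread::PostTask(
      BrowserThread::UI, FROM_HERE,
      base::Bind(&CreateContextOnUIThread,
                 media::BindToCurrentLoop(base::Bind(&OnContextCreated))));
}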
+
 // Class combining a Client::Buffer interface implementation and a pool buffer
 // implementation to guarantee proper cleanup on destruction on our side.
 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer {
  public:
   AutoReleaseBuffer(const scoped_refptr<VideoCaptureBufferPool>& pool,
-                    int buffer_id,
-                    void* data,
-                    size_t size)
+                    int buffer_id)
       : pool_(pool),
         id_(buffer_id),
-        data_(data),
-        size_(size) {
+        buffer_handle_(pool_->GetBufferHandle(buffer_id).Pass()) {
     DCHECK(pool_.get());
   }
   int id() const override { return id_; }
-  void* data() const override { return data_; }
-  size_t size() const override { return size_; }
+  size_t size() const override { return buffer_handle_->size(); }
+  scoped_ptr<media::DataHandle> GetDataHandle() override {
+    return buffer_handle_->GetDataHandle();
+  }
+  ClientBuffer AsClientBuffer() override {
+    return buffer_handle_->AsClientBuffer();
+  }

  private:
   ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); }

   const scoped_refptr<VideoCaptureBufferPool> pool_;
   const int id_;
-  void* const data_;
-  const size_t size_;
+  const scoped_ptr<VideoCaptureBufferPool::BufferHandle> buffer_handle_;
+};
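
AutoReleaseBuffer is a small RAII wrapper: the pool reservation is held for exactly as long as some scoped_refptr keeps the object alive. A hedged usage sketch (flow illustrative, not part of this patch):

// Sketch only.
scoped_refptr<media::VideoCaptureDevice::Client::Buffer> buffer(
    new AutoReleaseBuffer(pool, buffer_id));  // Wraps an already-reserved id.
// ... pass |buffer| around, possibly across threads ...
buffer = nullptr;  // Last reference dropped: ~AutoReleaseBuffer() runs and
                   // calls pool->RelinquishProducerReservation(buffer_id).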
+
+// Internal ref-counted class that wraps an incoming GpuMemoryBuffer into a
+// Texture-backed VideoFrame. Each VideoFrame creation is balanced by waiting
+// on the associated |sync_point|. After the VideoFrame is consumed, the
+// inserted ReleaseCallback() is run, and the GpuMemoryBuffer is recycled.
+//
+// This class jumps between threads due to GPU-related thread limitations:
+// some objects cannot be accessed from the IO Thread, where we are
+// constructed, while others need to be constructed on the UI Thread. For this
+// reason most operations are carried out on the Capture Thread
+// (|capture_task_runner_|).
+class VideoCaptureDeviceClient::TextureWrapperDelegate final
+    : public base::RefCountedThreadSafe<TextureWrapperDelegate> {
+ public:
+  TextureWrapperDelegate(
+      const base::WeakPtr<VideoCaptureController>& controller,
+      const scoped_refptr<base::SingleThreadTaskRunner>& capture_task_runner,
+      const media::VideoCaptureFormat& capture_format);
+
+  // Wraps the GpuMemoryBuffer-backed |buffer| into a Texture, and sends it to
+  // |controller_| wrapped in a VideoFrame.
+  void OnIncomingCapturedGpuMemoryBuffer(
+      const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& buffer,
+      const gfx::Size& frame_size,
+      const base::TimeTicks& timestamp);
+
+ private:
+  friend class base::RefCountedThreadSafe<TextureWrapperDelegate>;
+  ~TextureWrapperDelegate();
+
+  // Creates some necessary members in |capture_task_runner_|.
+  void Init(const media::VideoCaptureFormat& capture_format);
+  // Runs the bottom half of the GlHelper creation.
+  void CreateGlHelper(
+      scoped_refptr<ContextProviderCommandBuffer> capture_thread_context);
+
+  // Deletes the Image and Texture generated for a wrapped frame when the
+  // VideoFrame is released, which allows the GpuMemoryBuffer to be recycled.
+  void ReleaseCallback(GLuint image_id,
+                       GLuint texture_id,
+                       uint32 sync_point);
+
+  // The Command Buffer lost the GL context, e.g. because the GPU process
+  // crashed. Signal the error to our owner so the capture can be torn down.
+  void LostContextCallback();
+
+  // Prints the error |message| and notifies |controller_| of an error.
+  void OnError(const std::string& message);
+
+  const base::WeakPtr<VideoCaptureController> controller_;
+  const scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner_;
+
+  // Command buffer reference, needs to be destroyed when unused. It is created
+  // on UI Thread and bound to Capture Thread. In particular, it cannot be used
+  // from IO Thread.
+  scoped_refptr<ContextProviderCommandBuffer> capture_thread_context_;
+  // Created and used from Capture Thread. Cannot be used from IO Thread.
+  scoped_ptr<GLHelper> gl_helper_;
+
+  DISALLOW_COPY_AND_ASSIGN(TextureWrapperDelegate);
+};
 VideoCaptureDeviceClient::VideoCaptureDeviceClient(
     const base::WeakPtr<VideoCaptureController>& controller,
-    const scoped_refptr<VideoCaptureBufferPool>& buffer_pool)
+    const scoped_refptr<VideoCaptureBufferPool>& buffer_pool,
+    const scoped_refptr<base::SingleThreadTaskRunner>& capture_task_runner,
+    const media::VideoCaptureFormat& capture_format)
     : controller_(controller),
       buffer_pool_(buffer_pool),
-      last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {}
+      wrapper_delegate_(
+          (capture_format.pixel_format == media::PIXEL_FORMAT_GPUMEMORYBUFFER)
+              ? new TextureWrapperDelegate(controller,
+                                           capture_task_runner,
+                                           capture_format)
+              : nullptr),
+      capture_task_runner_(capture_task_runner),
+      last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {
+  DCHECK_CURRENTLY_ON(BrowserThread::IO);
+}

 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {}

@@ -110,7 +263,9 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
   if (!buffer.get())
     return;

-  uint8* const yplane = reinterpret_cast<uint8*>(buffer->data());
+  const scoped_ptr<media::DataHandle> scoped_data_handle =
+      buffer->GetDataHandle().Pass();
+  uint8* const yplane = reinterpret_cast<uint8*>(scoped_data_handle->data());
   uint8* const uplane =
       yplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420,
                                                VideoFrame::kYPlane, dimensions);
@@ -201,31 +356,12 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
         frame_format.pixel_format);
     return;
   }
-  scoped_refptr<VideoFrame> frame =
-      VideoFrame::WrapExternalPackedMemory(
-          VideoFrame::I420,
-          dimensions,
-          gfx::Rect(dimensions),
-          dimensions,
-          yplane,
-          VideoFrame::AllocationSize(VideoFrame::I420, dimensions),
-          base::SharedMemory::NULLHandle(),
-          0,
-          base::TimeDelta(),
-          base::Closure());
-  DCHECK(frame.get());
-  frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
-                               frame_format.frame_rate);
-  BrowserThread::PostTask(
-      BrowserThread::IO,
-      FROM_HERE,
-      base::Bind(
-          &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
-          controller_,
-          buffer,
-          frame,
-          timestamp));
+  OnIncomingCapturedBuffer(buffer,
+                           media::VideoCaptureFormat(dimensions,
+                                                     frame_format.frame_rate,
+                                                     media::PIXEL_FORMAT_I420),
+                           timestamp);
 }

 void
@@ -247,6 +383,8 @@ VideoCaptureDeviceClient::OnIncomingCapturedYuvData(
       ReserveOutputBuffer(frame_format.pixel_format, frame_format.frame_size);
   if (!buffer.get())
     return;
+  const scoped_ptr<media::DataHandle> scoped_data_handle =
+      buffer->GetDataHandle().Pass();
   // Blit (copy) here from y,u,v into buffer.data(). Needed so we can return
   // the parameter buffer synchronously to the driver.
@@ -254,7 +392,7 @@ VideoCaptureDeviceClient::OnIncomingCapturedYuvData(
       VideoFrame::kYPlane, frame_format.frame_size);
   const size_t u_plane_size = VideoFrame::PlaneAllocationSize(
       VideoFrame::I420, VideoFrame::kUPlane, frame_format.frame_size);
-  uint8* const dst_y = reinterpret_cast<uint8*>(buffer->data());
+  uint8* const dst_y = reinterpret_cast<uint8*>(scoped_data_handle->data());
   uint8* const dst_u = dst_y + y_plane_size;
   uint8* const dst_v = dst_u + u_plane_size;
@@ -280,32 +418,15 @@ VideoCaptureDeviceClient::OnIncomingCapturedYuvData(
     return;
   }

-  scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapExternalYuvData(
-      VideoFrame::I420, frame_format.frame_size,
-      gfx::Rect(frame_format.frame_size), frame_format.frame_size, y_stride,
-      u_stride, v_stride, dst_y, dst_u, dst_v, base::TimeDelta(),
-      base::Closure());
-  DCHECK(video_frame.get());
-  video_frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
-                                     frame_format.frame_rate);
-
-  BrowserThread::PostTask(
-      BrowserThread::IO,
-      FROM_HERE,
-      base::Bind(
-          &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
-          controller_,
-          buffer,
-          video_frame,
-          timestamp));
+  OnIncomingCapturedBuffer(buffer, frame_format, timestamp);
 }
 scoped_refptr<media::VideoCaptureDevice::Client::Buffer>
 VideoCaptureDeviceClient::ReserveOutputBuffer(media::VideoPixelFormat format,
                                               const gfx::Size& dimensions) {
-  DCHECK(format == media::PIXEL_FORMAT_TEXTURE ||
-         format == media::PIXEL_FORMAT_I420 ||
-         format == media::PIXEL_FORMAT_ARGB);
+  DCHECK(format == media::PIXEL_FORMAT_I420 ||
+         format == media::PIXEL_FORMAT_TEXTURE ||
+         format == media::PIXEL_FORMAT_GPUMEMORYBUFFER);
   DCHECK_GT(dimensions.width(), 0);
   DCHECK_GT(dimensions.height(), 0);
@@ -314,12 +435,9 @@ VideoCaptureDeviceClient::ReserveOutputBuffer(media::VideoPixelFormat format,
       buffer_pool_->ReserveForProducer(format, dimensions, &buffer_id_to_drop);
   if (buffer_id == VideoCaptureBufferPool::kInvalidId)
     return NULL;
-  void* data;
-  size_t size;
-  buffer_pool_->GetBufferInfo(buffer_id, &data, &size);

   scoped_refptr<media::VideoCaptureDevice::Client::Buffer> output_buffer(
-      new AutoReleaseBuffer(buffer_pool_, buffer_id, data, size));
+      new AutoReleaseBuffer(buffer_pool_, buffer_id));

   if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) {
     BrowserThread::PostTask(BrowserThread::IO,
@@ -331,8 +449,47 @@ VideoCaptureDeviceClient::ReserveOutputBuffer(media::VideoPixelFormat format,
   return output_buffer;
 }
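
Callers of ReserveOutputBuffer() must treat a null return as pool exhaustion and drop the frame. A producer-side sketch (illustrative only, using the DataHandle accessor this patch introduces):

// Sketch only.
scoped_refptr<media::VideoCaptureDevice::Client::Buffer> buffer =
    ReserveOutputBuffer(media::PIXEL_FORMAT_I420, dimensions);
if (!buffer.get())
  return;  // Pool exhausted: drop this frame.
const scoped_ptr<media::DataHandle> handle = buffer->GetDataHandle().Pass();
memset(handle->data(), 0, buffer->size());  // Illustrative write only.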
-void
-VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame(
+void VideoCaptureDeviceClient::OnIncomingCapturedBuffer(
+    const scoped_refptr<Buffer>& buffer,
+    const media::VideoCaptureFormat& frame_format,
+    const base::TimeTicks& timestamp) {
+  if (frame_format.pixel_format == media::PIXEL_FORMAT_GPUMEMORYBUFFER) {
+    capture_task_runner_->PostTask(
+        FROM_HERE,
+        base::Bind(
+            &TextureWrapperDelegate::OnIncomingCapturedGpuMemoryBuffer,
+            wrapper_delegate_,
+            buffer,
+            frame_format.frame_size,
+            timestamp));
+  } else {
+    const scoped_ptr<media::DataHandle> scoped_data_handle =
+        buffer->GetDataHandle().Pass();
+
+    scoped_refptr<VideoFrame> video_frame =
+        VideoFrame::WrapExternalPackedMemory(
+            VideoFrame::I420,
+            frame_format.frame_size,
+            gfx::Rect(frame_format.frame_size),
+            frame_format.frame_size,
+            reinterpret_cast<uint8*>(scoped_data_handle->data()),
+            VideoFrame::AllocationSize(VideoFrame::I420,
+                                       frame_format.frame_size),
+            base::SharedMemory::NULLHandle(),
+            0 /* shared_memory_offset */,
+            base::TimeDelta(),
+            base::Closure());
+    DCHECK(video_frame.get());
+    video_frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
+                                       frame_format.frame_rate);
+
+    OnIncomingCapturedVideoFrame(buffer,
+                                 video_frame,
+                                 timestamp);
+  }
+}
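
Note the empty base::Closure() passed as the no-longer-needed callback above: lifetime is instead managed by keeping |buffer| alive next to the frame. The invariant, as read from this patch:

// The controller retains |buffer| and |video_frame| together, so the shared
// memory behind the frame outlives every consumer of the frame; only when
// the last reference to |buffer| drops does ~AutoReleaseBuffer() return the
// buffer id to the pool.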
+
+void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame(
     const scoped_refptr<Buffer>& buffer,
     const scoped_refptr<VideoFrame>& frame,
     const base::TimeTicks& timestamp) {
@@ -368,4 +525,159 @@ void VideoCaptureDeviceClient::OnLog(
       controller_, message));
 }

+VideoCaptureDeviceClient::TextureWrapperDelegate::TextureWrapperDelegate(
+    const base::WeakPtr<VideoCaptureController>& controller,
+    const scoped_refptr<base::SingleThreadTaskRunner>& capture_task_runner,
+    const media::VideoCaptureFormat& capture_format)
+    : controller_(controller),
+      capture_task_runner_(capture_task_runner) {
+  DCHECK_CURRENTLY_ON(BrowserThread::IO);
+  capture_task_runner_->PostTask(FROM_HERE,
+      base::Bind(&TextureWrapperDelegate::Init, this, capture_format));
+}
+
+void VideoCaptureDeviceClient::TextureWrapperDelegate::
+    OnIncomingCapturedGpuMemoryBuffer(
+        const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& buffer,
+        const gfx::Size& frame_size,
+        const base::TimeTicks& timestamp) {
+  DCHECK(capture_task_runner_->BelongsToCurrentThread());
+  DVLOG_IF(1, !gl_helper_) << "Skipping incoming frame: no GL context.";
+  if (!gl_helper_)
+    return;
+
+  gpu::gles2::GLES2Interface* gl = capture_thread_context_->ContextGL();
+  GLuint image_id = gl->CreateImageCHROMIUM(buffer->AsClientBuffer(),
+                                            frame_size.width(),
+                                            frame_size.height(), GL_BGRA_EXT);
+  DCHECK(image_id);
+
+  GLuint texture_id = gl_helper_->CreateTexture();
+  DCHECK(texture_id);
+  {
+    content::ScopedTextureBinder<GL_TEXTURE_2D> texture_binder(gl, texture_id);
+    gl->BindTexImage2DCHROMIUM(GL_TEXTURE_2D, image_id);
+  }
+
+  scoped_ptr<gpu::MailboxHolder> mailbox_holder(new gpu::MailboxHolder(
+      gl_helper_->ProduceMailboxHolderFromTexture(texture_id)));
+  DCHECK(!mailbox_holder->mailbox.IsZero());
+  DCHECK(mailbox_holder->mailbox.Verify());
+  DCHECK(mailbox_holder->texture_target);
+  DCHECK(mailbox_holder->sync_point);
+
+  scoped_refptr<media::VideoFrame> video_frame =
+      media::VideoFrame::WrapNativeTexture(
+          mailbox_holder.Pass(),
+          media::BindToCurrentLoop(
+              base::Bind(&VideoCaptureDeviceClient::TextureWrapperDelegate::
+                             ReleaseCallback,
+                         this, image_id, texture_id)),
+          frame_size,
+          gfx::Rect(frame_size),
+          frame_size,
+          base::TimeDelta(),
+          true /* allow_overlay */);
+
+  BrowserThread::PostTask(
+      BrowserThread::IO, FROM_HERE,
+      base::Bind(
+          &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
+          controller_, buffer, video_frame, timestamp));
+}
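
On the consumer side, the MailboxHolder produced above would be redeemed roughly as follows; a sketch using the CHROMIUM GL extensions this file already relies on, not part of this patch:

// Sketch only: consume the wrapped texture in another GL context.
void ConsumeCapturedFrame(gpu::gles2::GLES2Interface* gl,
                          const gpu::MailboxHolder& holder) {
  // The texture contents are only defined once the producer's sync point has
  // been reached, hence the wait before consuming the mailbox.
  gl->WaitSyncPointCHROMIUM(holder.sync_point);
  GLuint texture_id = gl->CreateAndConsumeTextureCHROMIUM(
      holder.texture_target, holder.mailbox.name);
  // ... sample from |texture_id| ...
  gl->DeleteTextures(1, &texture_id);
  // A sync point inserted here lets ReleaseCallback() recycle the image.
}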
+
+VideoCaptureDeviceClient::TextureWrapperDelegate::~TextureWrapperDelegate() {
+  // Might not be running on |capture_task_runner_|'s thread. Ensure owned
+  // objects are destroyed on the correct threads.
+  if (gl_helper_)
+    capture_task_runner_->DeleteSoon(FROM_HERE, gl_helper_.release());
+
+  if (capture_thread_context_) {
+    capture_task_runner_->PostTask(
+        FROM_HERE,
+        base::Bind(&ResetLostContextCallback, capture_thread_context_));
+    capture_thread_context_->AddRef();
+    ContextProviderCommandBuffer* raw_capture_thread_context =
+        capture_thread_context_.get();
+    capture_thread_context_ = nullptr;
+    capture_task_runner_->ReleaseSoon(FROM_HERE, raw_capture_thread_context);
+  }
+}
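
The AddRef()/ReleaseSoon() pair above is the usual idiom for forcing the final Release() of a refcounted object onto a specific thread. Condensed, the pattern is:

// Sketch only: make |object|'s final Release() happen on |task_runner|.
object->AddRef();                          // Pin it past our reset below.
ContextProviderCommandBuffer* raw = object.get();
object = nullptr;                          // Drop our reference here...
task_runner->ReleaseSoon(FROM_HERE, raw);  // ...balance the AddRef() there.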
+
+void VideoCaptureDeviceClient::TextureWrapperDelegate::Init(
+    const media::VideoCaptureFormat& capture_format) {
+  DCHECK(capture_task_runner_->BelongsToCurrentThread());
+
+  // In threaded compositing mode, we have to create our own context for
+  // Capture to avoid using the GPU command queue from multiple threads.
+  // Context creation must happen on UI thread; then the context needs to be
+  // bound to the appropriate thread, which is done in CreateGlHelper().
+  BrowserThread::PostTask(
+      BrowserThread::UI, FROM_HERE,
+      base::Bind(&CreateContextOnUIThread,
+                 media::BindToCurrentLoop(
+                     base::Bind(&VideoCaptureDeviceClient::
+                                    TextureWrapperDelegate::CreateGlHelper,
+                                this))));
+}
+
+void VideoCaptureDeviceClient::TextureWrapperDelegate::CreateGlHelper(
+    scoped_refptr<ContextProviderCommandBuffer> capture_thread_context) {
+  DCHECK(capture_task_runner_->BelongsToCurrentThread());
+
+  if (!capture_thread_context.get()) {
+    DLOG(ERROR) << "No offscreen GL Context!";
+    return;
+  }
+  // The lost-context callback is bounced to this thread and never runs on
+  // the IO thread. The destructor resets the callback, so base::Unretained
+  // is safe; a ref here would be a circular refcounted dependency.
+  capture_thread_context->SetLostContextCallback(media::BindToCurrentLoop(
+      base::Bind(
+          &VideoCaptureDeviceClient::TextureWrapperDelegate::
+              LostContextCallback,
+          base::Unretained(this))));
+  if (!capture_thread_context->BindToCurrentThread()) {
+    capture_thread_context = NULL;
+    DLOG(ERROR) << "Couldn't bind the Capture Context to the Capture Thread.";
+    return;
+  }
+  DCHECK(capture_thread_context);
+  capture_thread_context_ = capture_thread_context;
+
+  // At this point, |capture_thread_context| is a cc::ContextProvider.
+  // Creation of our GLHelper should happen on Capture Thread.
+  gl_helper_.reset(new GLHelper(capture_thread_context->ContextGL(),
+                                capture_thread_context->ContextSupport()));
+  DCHECK(gl_helper_);
+}
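
Why base::Unretained(this) is safe above: ~TextureWrapperDelegate() posts ResetLostContextCallback() before releasing the context, so the callback cannot fire once |this| is gone. Binding |this| with a reference instead would create the cycle sketched here:

// TextureWrapperDelegate --scoped_refptr--> ContextProviderCommandBuffer
// ContextProviderCommandBuffer --LostContextCallback ref--> the delegate,
// so neither refcount could ever reach zero. base::Unretained breaks the
// back edge; clearing the callback in the destructor keeps it sound.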
+
+void VideoCaptureDeviceClient::TextureWrapperDelegate::ReleaseCallback(
+    GLuint image_id,
+    GLuint texture_id,
+    uint32 sync_point) {
+  DCHECK(capture_task_runner_->BelongsToCurrentThread());
+
+  if (gl_helper_) {
+    gl_helper_->DeleteTexture(texture_id);
+    capture_thread_context_->ContextGL()->DestroyImageCHROMIUM(image_id);
+  }
+}
+
+void VideoCaptureDeviceClient::TextureWrapperDelegate::LostContextCallback() {
+  DCHECK(capture_task_runner_->BelongsToCurrentThread());
+  // Prevent incoming frames from being processed while the error is handled.
+  gl_helper_.reset();
+  OnError("GLContext lost");
+}
+
+void VideoCaptureDeviceClient::TextureWrapperDelegate::OnError(
+    const std::string& message) {
+  DCHECK(capture_task_runner_->BelongsToCurrentThread());
+  DLOG(ERROR) << message;
+  BrowserThread::PostTask(
+      BrowserThread::IO, FROM_HERE,
+      base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_));
+}
+
 }  // namespace content