Chromium Code Reviews

Unified Diff: content/browser/renderer_host/media/video_capture_device_client.cc

Issue 1090273006: Revert of VideoCapture: add support for GpuMemoryBuffer allocation and lifetime mgmt in VideoCaptureBufferPool (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 5 years, 8 months ago
 // Copyright 2015 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "content/browser/renderer_host/media/video_capture_device_client.h"
 
 #include "base/bind.h"
 #include "base/strings/stringprintf.h"
 #include "base/trace_event/trace_event.h"
-#include "content/browser/compositor/image_transport_factory.h"
-#include "content/browser/gpu/browser_gpu_channel_host_factory.h"
-#include "content/browser/gpu/browser_gpu_memory_buffer_manager.h"
-#include "content/browser/gpu/gpu_data_manager_impl.h"
 #include "content/browser/renderer_host/media/video_capture_buffer_pool.h"
 #include "content/browser/renderer_host/media/video_capture_controller.h"
-#include "content/common/gpu/client/context_provider_command_buffer.h"
-#include "content/common/gpu/client/gl_helper.h"
-#include "content/common/gpu/client/gpu_channel_host.h"
-#include "content/common/gpu/client/webgraphicscontext3d_command_buffer_impl.h"
-#include "content/common/gpu/gpu_process_launch_causes.h"
 #include "content/public/browser/browser_thread.h"
-#include "gpu/command_buffer/common/mailbox_holder.h"
 #include "media/base/bind_to_current_loop.h"
 #include "media/base/video_capture_types.h"
 #include "media/base/video_frame.h"
-#include "third_party/khronos/GLES2/gl2ext.h"
 #include "third_party/libyuv/include/libyuv.h"
 
 using media::VideoCaptureFormat;
 using media::VideoFrame;
 
 namespace content {
 
-namespace {
-
-#if !defined(OS_ANDROID)
-// Modelled after GpuProcessTransportFactory::CreateContextCommon().
-scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl> CreateContextCommon(
-    scoped_refptr<content::GpuChannelHost> gpu_channel_host,
-    int surface_id) {
-  if (!content::GpuDataManagerImpl::GetInstance()->
-          CanUseGpuBrowserCompositor()) {
-    DLOG(ERROR) << "No accelerated graphics found. Check chrome://gpu";
-    return scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl>();
-  }
-  blink::WebGraphicsContext3D::Attributes attrs;
-  attrs.shareResources = true;
-  attrs.depth = false;
-  attrs.stencil = false;
-  attrs.antialias = false;
-  attrs.noAutomaticFlushes = true;
-
-  if (!gpu_channel_host.get()) {
-    DLOG(ERROR) << "Failed to establish GPU channel.";
-    return scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl>();
-  }
-  GURL url("chrome://gpu/GpuProcessTransportFactory::CreateCaptureContext");
-  return make_scoped_ptr(
-      new WebGraphicsContext3DCommandBufferImpl(
-          surface_id,
-          url,
-          gpu_channel_host.get(),
-          attrs,
-          true /* lose_context_when_out_of_memory */,
-          content::WebGraphicsContext3DCommandBufferImpl::SharedMemoryLimits(),
-          NULL));
-}
-
-// Modelled after
-// GpuProcessTransportFactory::CreateOffscreenCommandBufferContext().
-scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl>
-CreateOffscreenCommandBufferContext() {
-  content::CauseForGpuLaunch cause = content::CAUSE_FOR_GPU_LAUNCH_CANVAS_2D;
-  // Android does not support synchronous opening of GPU channels. Should use
-  // EstablishGpuChannel() instead.
-  if (!content::BrowserGpuChannelHostFactory::instance())
-    return scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl>();
-  scoped_refptr<content::GpuChannelHost> gpu_channel_host(
-      content::BrowserGpuChannelHostFactory::instance()->
-          EstablishGpuChannelSync(cause));
-  DCHECK(gpu_channel_host);
-  return CreateContextCommon(gpu_channel_host, 0);
-}
-#endif
-
-typedef base::Callback<void(scoped_refptr<ContextProviderCommandBuffer>)>
-    ProcessContextCallback;
-
-void CreateContextOnUIThread(ProcessContextCallback bottom_half) {
-  DCHECK_CURRENTLY_ON(BrowserThread::UI);
-#if !defined(OS_ANDROID)
-  bottom_half.Run(ContextProviderCommandBuffer::Create(
-      CreateOffscreenCommandBufferContext(), "Offscreen-CaptureThread"));
-  return;
-#endif
-}
-
-void ResetLostContextCallback(
-    const scoped_refptr<ContextProviderCommandBuffer>& capture_thread_context) {
-  capture_thread_context->SetLostContextCallback(
-      cc::ContextProvider::LostContextCallback());
-}
-
-}  // anonymous namespace
-
 // Class combining a Client::Buffer interface implementation and a pool buffer
 // implementation to guarantee proper cleanup on destruction on our side.
 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer {
  public:
   AutoReleaseBuffer(const scoped_refptr<VideoCaptureBufferPool>& pool,
-                    int buffer_id)
-      : id_(buffer_id),
-        pool_(pool),
-        buffer_handle_(pool_->GetBufferHandle(buffer_id).Pass()) {
+                    int buffer_id,
+                    void* data,
+                    size_t size)
+      : pool_(pool),
+        id_(buffer_id),
+        data_(data),
+        size_(size) {
     DCHECK(pool_.get());
   }
   int id() const override { return id_; }
-  size_t size() const override { return buffer_handle_->size(); }
-  void* data() override { return buffer_handle_->data(); }
-  ClientBuffer AsClientBuffer() override {
-    return buffer_handle_->AsClientBuffer();
-  }
+  void* data() const override { return data_; }
+  size_t size() const override { return size_; }
 
  private:
   ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); }
 
+  const scoped_refptr<VideoCaptureBufferPool> pool_;
   const int id_;
-  const scoped_refptr<VideoCaptureBufferPool> pool_;
-  const scoped_ptr<VideoCaptureBufferPool::BufferHandle> buffer_handle_;
-};
-
-// Internal ref-counted class wrapping an incoming GpuMemoryBuffer into a
-// Texture backed VideoFrame. This VideoFrame creation is balanced by a waiting
-// on the associated |sync_point|. After VideoFrame consumption the inserted
-// ReleaseCallback() will be called, where the Texture is destroyed.
-//
-// This class jumps between threads due to GPU-related thread limitations, i.e.
-// some objects cannot be accessed from IO Thread whereas others need to be
-// constructed on UI Thread. For this reason most of the operations are carried
-// out on Capture Thread (|capture_task_runner_|).
-class VideoCaptureDeviceClient::TextureWrapHelper final
-    : public base::RefCountedThreadSafe<TextureWrapHelper> {
- public:
-  TextureWrapHelper(
-      const base::WeakPtr<VideoCaptureController>& controller,
-      const scoped_refptr<base::SingleThreadTaskRunner>& capture_task_runner);
-
-  // Wraps the GpuMemoryBuffer-backed |buffer| into a Texture, and sends it to
-  // |controller_| wrapped in a VideoFrame.
-  void OnIncomingCapturedGpuMemoryBuffer(
-      scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer,
-      const media::VideoCaptureFormat& frame_format,
-      const base::TimeTicks& timestamp);
-
- private:
-  friend class base::RefCountedThreadSafe<TextureWrapHelper>;
-  ~TextureWrapHelper();
-
-  // Creates some necessary members in |capture_task_runner_|.
-  void Init();
-  // Runs the bottom half of the GlHelper creation.
-  void CreateGlHelper(
-      scoped_refptr<ContextProviderCommandBuffer> capture_thread_context);
-
-  // Recycles |memory_buffer|, deletes Image and Texture on VideoFrame release.
-  void ReleaseCallback(GLuint image_id,
-                       GLuint texture_id,
-                       uint32 sync_point);
-
-  // The Command Buffer lost the GL context, f.i. GPU process crashed. Signal
-  // error to our owner so the capture can be torn down.
-  void LostContextCallback();
-
-  // Prints the error |message| and notifies |controller_| of an error.
-  void OnError(const std::string& message);
-
-  // |controller_| should only be used on IO thread.
-  const base::WeakPtr<VideoCaptureController> controller_;
-  const scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner_;
-
-  // Command buffer reference, needs to be destroyed when unused. It is created
-  // on UI Thread and bound to Capture Thread. In particular, it cannot be used
-  // from IO Thread.
-  scoped_refptr<ContextProviderCommandBuffer> capture_thread_context_;
-  // Created and used from Capture Thread. Cannot be used from IO Thread.
-  scoped_ptr<GLHelper> gl_helper_;
-
-  DISALLOW_COPY_AND_ASSIGN(TextureWrapHelper);
+  void* const data_;
+  const size_t size_;
 };
 
 VideoCaptureDeviceClient::VideoCaptureDeviceClient(
     const base::WeakPtr<VideoCaptureController>& controller,
-    const scoped_refptr<VideoCaptureBufferPool>& buffer_pool,
-    const scoped_refptr<base::SingleThreadTaskRunner>& capture_task_runner)
+    const scoped_refptr<VideoCaptureBufferPool>& buffer_pool)
     : controller_(controller),
       buffer_pool_(buffer_pool),
-      capture_task_runner_(capture_task_runner),
-      last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {
-  DCHECK_CURRENTLY_ON(BrowserThread::IO);
-}
+      last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {}
 
 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {}
 
 void VideoCaptureDeviceClient::OnIncomingCapturedData(
     const uint8* data,
     int length,
     const VideoCaptureFormat& frame_format,
     int rotation,
     const base::TimeTicks& timestamp) {
   TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData");
(...skipping 32 matching lines...)
     rotation_mode = libyuv::kRotate270;
 
   const gfx::Size dimensions(destination_width, destination_height);
   if (!VideoFrame::IsValidConfig(VideoFrame::I420,
                                  dimensions,
                                  gfx::Rect(dimensions),
                                  dimensions)) {
     return;
   }
 
-  scoped_ptr<Buffer> buffer(
-      ReserveOutputBuffer(media::PIXEL_FORMAT_I420, dimensions));
+  scoped_refptr<Buffer> buffer =
+      ReserveOutputBuffer(media::PIXEL_FORMAT_I420, dimensions);
   if (!buffer.get())
     return;
 
   uint8* const yplane = reinterpret_cast<uint8*>(buffer->data());
   uint8* const uplane =
       yplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420,
                                                VideoFrame::kYPlane, dimensions);
   uint8* const vplane =
       uplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420,
                                                VideoFrame::kUPlane, dimensions);
(...skipping 74 matching lines...)
                             (flip ? -1 : 1) * frame_format.frame_size.height(),
                             new_unrotated_width,
                             new_unrotated_height,
                             rotation_mode,
                             origin_colorspace) != 0) {
     DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from "
                   << media::VideoCaptureFormat::PixelFormatToString(
                          frame_format.pixel_format);
     return;
   }
+  scoped_refptr<VideoFrame> frame =
+      VideoFrame::WrapExternalPackedMemory(
+          VideoFrame::I420,
+          dimensions,
+          gfx::Rect(dimensions),
+          dimensions,
+          yplane,
+          VideoFrame::AllocationSize(VideoFrame::I420, dimensions),
+          base::SharedMemory::NULLHandle(),
+          0,
+          base::TimeDelta(),
+          base::Closure());
+  DCHECK(frame.get());
+  frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
+                               frame_format.frame_rate);
 
-  OnIncomingCapturedBuffer(buffer.Pass(),
-                           media::VideoCaptureFormat(dimensions,
-                                                     frame_format.frame_rate,
-                                                     media::PIXEL_FORMAT_I420),
-                           timestamp);
+  BrowserThread::PostTask(
+      BrowserThread::IO,
+      FROM_HERE,
+      base::Bind(
+          &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
+          controller_,
+          buffer,
+          frame,
+          timestamp));
 }
 
 void
 VideoCaptureDeviceClient::OnIncomingCapturedYuvData(
     const uint8* y_data,
     const uint8* u_data,
     const uint8* v_data,
     size_t y_stride,
     size_t u_stride,
     size_t v_stride,
     const VideoCaptureFormat& frame_format,
     int clockwise_rotation,
     const base::TimeTicks& timestamp) {
   TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedYuvData");
   DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_I420);
   DCHECK_EQ(clockwise_rotation, 0) << "Rotation not supported";
 
-  scoped_ptr<Buffer> buffer(
-      ReserveOutputBuffer(frame_format.pixel_format, frame_format.frame_size));
+  scoped_refptr<Buffer> buffer =
+      ReserveOutputBuffer(frame_format.pixel_format, frame_format.frame_size);
   if (!buffer.get())
     return;
 
   // Blit (copy) here from y,u,v into buffer.data()). Needed so we can return
   // the parameter buffer synchronously to the driver.
   const size_t y_plane_size = VideoFrame::PlaneAllocationSize(VideoFrame::I420,
       VideoFrame::kYPlane, frame_format.frame_size);
   const size_t u_plane_size = VideoFrame::PlaneAllocationSize(
       VideoFrame::I420, VideoFrame::kUPlane, frame_format.frame_size);
   uint8* const dst_y = reinterpret_cast<uint8*>(buffer->data());
(...skipping 15 matching lines...)
                        v_data, v_stride,
                        dst_y, dst_y_stride,
                        dst_u, dst_u_stride,
                        dst_v, dst_v_stride,
                        frame_format.frame_size.width(),
                        frame_format.frame_size.height())) {
     DLOG(WARNING) << "Failed to copy buffer";
     return;
   }
 
-  OnIncomingCapturedBuffer(buffer.Pass(), frame_format, timestamp);
+  scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapExternalYuvData(
+      VideoFrame::I420, frame_format.frame_size,
+      gfx::Rect(frame_format.frame_size), frame_format.frame_size, y_stride,
+      u_stride, v_stride, dst_y, dst_u, dst_v, base::TimeDelta(),
+      base::Closure());
+  DCHECK(video_frame.get());
+  video_frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
+                                     frame_format.frame_rate);
+
+  BrowserThread::PostTask(
+      BrowserThread::IO,
+      FROM_HERE,
+      base::Bind(
+          &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
+          controller_,
+          buffer,
+          video_frame,
+          timestamp));
 };
 
-scoped_ptr<media::VideoCaptureDevice::Client::Buffer>
+scoped_refptr<media::VideoCaptureDevice::Client::Buffer>
 VideoCaptureDeviceClient::ReserveOutputBuffer(media::VideoPixelFormat format,
                                               const gfx::Size& dimensions) {
-  DCHECK(format == media::PIXEL_FORMAT_I420 ||
-         format == media::PIXEL_FORMAT_TEXTURE ||
-         format == media::PIXEL_FORMAT_GPUMEMORYBUFFER);
+  DCHECK(format == media::PIXEL_FORMAT_TEXTURE ||
+         format == media::PIXEL_FORMAT_I420 ||
+         format == media::PIXEL_FORMAT_ARGB);
   DCHECK_GT(dimensions.width(), 0);
   DCHECK_GT(dimensions.height(), 0);
 
-  if (format == media::PIXEL_FORMAT_GPUMEMORYBUFFER && !texture_wrap_helper_) {
-    texture_wrap_helper_ =
-        new TextureWrapHelper(controller_, capture_task_runner_);
-  }
-
   int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId;
   const int buffer_id =
       buffer_pool_->ReserveForProducer(format, dimensions, &buffer_id_to_drop);
   if (buffer_id == VideoCaptureBufferPool::kInvalidId)
     return NULL;
+  void* data;
+  size_t size;
+  buffer_pool_->GetBufferInfo(buffer_id, &data, &size);
 
-  scoped_ptr<media::VideoCaptureDevice::Client::Buffer> output_buffer(
-      new AutoReleaseBuffer(buffer_pool_, buffer_id));
+  scoped_refptr<media::VideoCaptureDevice::Client::Buffer> output_buffer(
+      new AutoReleaseBuffer(buffer_pool_, buffer_id, data, size));
 
   if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) {
     BrowserThread::PostTask(BrowserThread::IO,
         FROM_HERE,
         base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread,
                    controller_, buffer_id_to_drop));
   }
 
-  return output_buffer.Pass();
+  return output_buffer;
 }
 
-void VideoCaptureDeviceClient::OnIncomingCapturedBuffer(
-    scoped_ptr<Buffer> buffer,
-    const media::VideoCaptureFormat& frame_format,
-    const base::TimeTicks& timestamp) {
-  if (frame_format.pixel_format == media::PIXEL_FORMAT_GPUMEMORYBUFFER) {
-    capture_task_runner_->PostTask(
-        FROM_HERE,
-        base::Bind(&TextureWrapHelper::OnIncomingCapturedGpuMemoryBuffer,
-                   texture_wrap_helper_,
-                   base::Passed(&buffer),
-                   frame_format,
-                   timestamp));
-  } else {
-    DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_I420);
-    scoped_refptr<VideoFrame> video_frame =
-        VideoFrame::WrapExternalPackedMemory(
-            VideoFrame::I420,
-            frame_format.frame_size,
-            gfx::Rect(frame_format.frame_size),
-            frame_format.frame_size,
-            reinterpret_cast<uint8*>(buffer->data()),
-            VideoFrame::AllocationSize(VideoFrame::I420,
-                                       frame_format.frame_size),
-            base::SharedMemory::NULLHandle(),
-            0 /* shared_memory_offset */,
-            base::TimeDelta(),
-            base::Closure());
-    DCHECK(video_frame.get());
-    video_frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
-                                       frame_format.frame_rate);
-    OnIncomingCapturedVideoFrame(buffer.Pass(), video_frame, timestamp);
-  }
-}
-
-void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame(
-    scoped_ptr<Buffer> buffer,
+void
+VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame(
+    const scoped_refptr<Buffer>& buffer,
     const scoped_refptr<VideoFrame>& frame,
     const base::TimeTicks& timestamp) {
   BrowserThread::PostTask(
       BrowserThread::IO,
       FROM_HERE,
       base::Bind(
           &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
           controller_,
-          base::Passed(&buffer),
+          buffer,
           frame,
           timestamp));
 }
 
 void VideoCaptureDeviceClient::OnError(
     const std::string& reason) {
   const std::string log_message = base::StringPrintf(
       "Error on video capture: %s, OS message: %s",
       reason.c_str(),
       logging::SystemErrorCodeToString(
           logging::GetLastSystemErrorCode()).c_str());
   DLOG(ERROR) << log_message;
   OnLog(log_message);
   BrowserThread::PostTask(BrowserThread::IO,
       FROM_HERE,
       base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_));
 }
 
 void VideoCaptureDeviceClient::OnLog(
     const std::string& message) {
   BrowserThread::PostTask(BrowserThread::IO, FROM_HERE,
                           base::Bind(&VideoCaptureController::DoLogOnIOThread,
                                      controller_, message));
 }
 
-VideoCaptureDeviceClient::TextureWrapHelper::TextureWrapHelper(
-    const base::WeakPtr<VideoCaptureController>& controller,
-    const scoped_refptr<base::SingleThreadTaskRunner>& capture_task_runner)
-    : controller_(controller),
-      capture_task_runner_(capture_task_runner) {
-  capture_task_runner_->PostTask(FROM_HERE,
-                                 base::Bind(&TextureWrapHelper::Init, this));
-}
-
-void
-VideoCaptureDeviceClient::TextureWrapHelper::OnIncomingCapturedGpuMemoryBuffer(
-    scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer,
-    const media::VideoCaptureFormat& frame_format,
-    const base::TimeTicks& timestamp) {
-  DCHECK(capture_task_runner_->BelongsToCurrentThread());
-  DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_GPUMEMORYBUFFER);
-  if (!gl_helper_) {
-    // |gl_helper_| might not exist due to asynchronous initialization not
-    // finished or due to termination in process after a context loss.
-    DVLOG(1) << " Skipping ingress frame, no GL context.";
-    return;
-  }
-
-  gpu::gles2::GLES2Interface* gl = capture_thread_context_->ContextGL();
-  GLuint image_id = gl->CreateImageCHROMIUM(buffer->AsClientBuffer(),
-                                            frame_format.frame_size.width(),
-                                            frame_format.frame_size.height(),
-                                            GL_BGRA_EXT);
-  DCHECK(image_id);
-
-  GLuint texture_id = gl_helper_->CreateTexture();
-  DCHECK(texture_id);
-  {
-    content::ScopedTextureBinder<GL_TEXTURE_2D> texture_binder(gl, texture_id);
-    gl->BindTexImage2DCHROMIUM(GL_TEXTURE_2D, image_id);
-  }
-
-  scoped_ptr<gpu::MailboxHolder> mailbox_holder(new gpu::MailboxHolder(
-      gl_helper_->ProduceMailboxHolderFromTexture(texture_id)));
-  DCHECK(!mailbox_holder->mailbox.IsZero());
-  DCHECK(mailbox_holder->mailbox.Verify());
-  DCHECK(mailbox_holder->texture_target);
-  DCHECK(mailbox_holder->sync_point);
-
-  scoped_refptr<media::VideoFrame> video_frame =
-      media::VideoFrame::WrapNativeTexture(
-          mailbox_holder.Pass(),
-          media::BindToCurrentLoop(
-              base::Bind(&VideoCaptureDeviceClient::TextureWrapHelper::
-                             ReleaseCallback,
-                         this, image_id, texture_id)),
-          frame_format.frame_size,
-          gfx::Rect(frame_format.frame_size),
-          frame_format.frame_size,
-          base::TimeDelta(),
-          true /* allow_overlay */);
-  video_frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
-                                     frame_format.frame_rate);
-
-  BrowserThread::PostTask(
-      BrowserThread::IO, FROM_HERE,
-      base::Bind(
-          &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
-          controller_, base::Passed(&buffer), video_frame, timestamp));
-}
-
-VideoCaptureDeviceClient::TextureWrapHelper::~TextureWrapHelper() {
-  // Might not be running on capture_task_runner_'s thread. Ensure owned objects
-  // are destroyed on the correct threads.
-  if (gl_helper_)
-    capture_task_runner_->DeleteSoon(FROM_HERE, gl_helper_.release());
-
-  if (capture_thread_context_) {
-    capture_task_runner_->PostTask(
-        FROM_HERE,
-        base::Bind(&ResetLostContextCallback, capture_thread_context_));
-    capture_thread_context_->AddRef();
-    ContextProviderCommandBuffer* raw_capture_thread_context =
-        capture_thread_context_.get();
-    capture_thread_context_ = nullptr;
-    capture_task_runner_->ReleaseSoon(FROM_HERE, raw_capture_thread_context);
-  }
-}
-
-void VideoCaptureDeviceClient::TextureWrapHelper::Init() {
-  DCHECK(capture_task_runner_->BelongsToCurrentThread());
-
-  // In threaded compositing mode, we have to create our own context for Capture
-  // to avoid using the GPU command queue from multiple threads. Context
-  // creation must happen on UI thread; then the context needs to be bound to
-  // the appropriate thread, which is done in CreateGlHelper().
-  BrowserThread::PostTask(
-      BrowserThread::UI, FROM_HERE,
-      base::Bind(
-          &CreateContextOnUIThread,
-          media::BindToCurrentLoop(base::Bind(
-              &VideoCaptureDeviceClient::TextureWrapHelper::CreateGlHelper,
-              this))));
-}
-
-void VideoCaptureDeviceClient::TextureWrapHelper::CreateGlHelper(
-    scoped_refptr<ContextProviderCommandBuffer> capture_thread_context) {
-  DCHECK(capture_task_runner_->BelongsToCurrentThread());
-
-  if (!capture_thread_context.get()) {
-    DLOG(ERROR) << "No offscreen GL Context!";
-    return;
-  }
-  // This may not happen in IO Thread. The destructor resets the context lost
-  // callback, so base::Unretained is safe; otherwise it'd be a circular ref
-  // counted dependency.
-  capture_thread_context->SetLostContextCallback(media::BindToCurrentLoop(
-      base::Bind(
-          &VideoCaptureDeviceClient::TextureWrapHelper::LostContextCallback,
-          base::Unretained(this))));
-  if (!capture_thread_context->BindToCurrentThread()) {
-    capture_thread_context = NULL;
-    DLOG(ERROR) << "Couldn't bind the Capture Context to the Capture Thread.";
-    return;
-  }
-  DCHECK(capture_thread_context);
-  capture_thread_context_ = capture_thread_context;
-
-  // At this point, |capture_thread_context| is a cc::ContextProvider. Creation
-  // of our GLHelper should happen on Capture Thread.
-  gl_helper_.reset(new GLHelper(capture_thread_context->ContextGL(),
-                                capture_thread_context->ContextSupport()));
-  DCHECK(gl_helper_);
-}
-
-void VideoCaptureDeviceClient::TextureWrapHelper::ReleaseCallback(
-    GLuint image_id,
-    GLuint texture_id,
-    uint32 sync_point) {
-  DCHECK(capture_task_runner_->BelongsToCurrentThread());
-
-  if (gl_helper_) {
-    gl_helper_->DeleteTexture(texture_id);
-    capture_thread_context_->ContextGL()->DestroyImageCHROMIUM(image_id);
-  }
-}
-
-void VideoCaptureDeviceClient::TextureWrapHelper::LostContextCallback() {
-  DCHECK(capture_task_runner_->BelongsToCurrentThread());
-  // Prevent incoming frames from being processed while OnError gets groked.
-  gl_helper_.reset();
-  OnError("GLContext lost");
-}
-
-void VideoCaptureDeviceClient::TextureWrapHelper::OnError(
-    const std::string& message) {
-  DCHECK(capture_task_runner_->BelongsToCurrentThread());
-  DLOG(ERROR) << message;
-  BrowserThread::PostTask(
-      BrowserThread::IO, FROM_HERE,
-      base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_));
-}
-
 }  // namespace content
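
The functional core of this revert is the Client::Buffer contract: the patch being reverted handed each buffer a pool-owned BufferHandle (with an AsClientBuffer() hook so a GpuMemoryBuffer-backed buffer could be wrapped into a texture), while the code being restored simply caches a raw data pointer and size obtained from the pool at reservation time and returns the producer reservation when the buffer is destroyed. The sketch below is a minimal standalone illustration of that restored shape only; FakeBufferPool and everything else in it are simplified stand-ins, not the real VideoCaptureBufferPool API, and the real AutoReleaseBuffer is ref-counted rather than held by unique_ptr.

// Minimal standalone sketch (not Chromium code): simplified stand-ins used
// only to illustrate the buffer interface this revert restores.
#include <cstddef>
#include <cstdio>
#include <memory>
#include <vector>

// Stand-in for VideoCaptureBufferPool: hands out one fixed-size buffer by id
// and tracks the producer reservation.
class FakeBufferPool {
 public:
  explicit FakeBufferPool(size_t buffer_size) : storage_(buffer_size) {}

  int ReserveForProducer() { reserved_ = true; return 0; }  // single buffer, id 0
  void RelinquishProducerReservation(int id) {
    std::printf("buffer %d returned to pool\n", id);
    reserved_ = false;
  }
  void GetBufferInfo(int /*id*/, void** data, size_t* size) {
    *data = storage_.data();
    *size = storage_.size();
  }

 private:
  std::vector<unsigned char> storage_;
  bool reserved_ = false;
};

// Shape of AutoReleaseBuffer after the revert: it caches the raw data pointer
// and size supplied by the pool and relinquishes the reservation on
// destruction.
class AutoReleaseBuffer {
 public:
  AutoReleaseBuffer(FakeBufferPool* pool, int id, void* data, size_t size)
      : pool_(pool), id_(id), data_(data), size_(size) {}
  ~AutoReleaseBuffer() { pool_->RelinquishProducerReservation(id_); }

  int id() const { return id_; }
  void* data() const { return data_; }
  size_t size() const { return size_; }

 private:
  FakeBufferPool* const pool_;
  const int id_;
  void* const data_;
  const size_t size_;
};

int main() {
  FakeBufferPool pool(640 * 480 * 3 / 2);  // room for one I420 VGA frame

  // Roughly mirrors the reserve / GetBufferInfo / wrap sequence in
  // ReserveOutputBuffer() after the revert (pixel format, dimensions, and
  // buffer-drop handling omitted).
  const int id = pool.ReserveForProducer();
  void* data = nullptr;
  size_t size = 0;
  pool.GetBufferInfo(id, &data, &size);
  auto buffer = std::make_unique<AutoReleaseBuffer>(&pool, id, data, size);
  std::printf("reserved buffer %d, %zu bytes\n", buffer->id(), buffer->size());

  // Dropping the last reference returns the buffer to the pool, which is the
  // cleanup guarantee the AutoReleaseBuffer wrapper exists to provide.
  buffer.reset();
  return 0;
}

The auto-release-on-destruction design matters because both OnIncomingCapturedData() and OnIncomingCapturedYuvData() return early after a reservation when the libyuv conversion or copy fails; dropping the last reference is what guarantees the pool gets the buffer back on those paths.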