Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(94)

Side by Side Diff: content/renderer/media/video_capture_impl.cc

Issue 1439533004: Remove dead code paths around PIXEL_STORAGE_TEXTURE in capture pipeline. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: mcasas's second round comments REBASE Created 5 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 // 4 //
5 // Notes about usage of this object by VideoCaptureImplManager. 5 // Notes about usage of this object by VideoCaptureImplManager.
6 // 6 //
7 // VideoCaptureImplManager accesses this object by using a Unretained() 7 // VideoCaptureImplManager accesses this object by using a Unretained()
8 // binding and tasks on the IO thread. It is then important that 8 // binding and tasks on the IO thread. It is then important that
9 // VideoCaptureImpl never posts tasks to itself. All operations must be 9 // VideoCaptureImpl never posts tasks to itself. All operations must be
10 // synchronous. 10 // synchronous.
11 11
12 #include "content/renderer/media/video_capture_impl.h" 12 #include "content/renderer/media/video_capture_impl.h"
13 13
14 #include "base/bind.h" 14 #include "base/bind.h"
15 #include "base/stl_util.h" 15 #include "base/stl_util.h"
16 #include "base/thread_task_runner_handle.h" 16 #include "base/thread_task_runner_handle.h"
17 #include "content/child/child_process.h" 17 #include "content/child/child_process.h"
18 #include "content/common/gpu/client/gpu_memory_buffer_impl.h" 18 #include "content/common/gpu/client/gpu_memory_buffer_impl.h"
19 #include "content/common/media/video_capture_messages.h" 19 #include "content/common/media/video_capture_messages.h"
20 #include "media/base/bind_to_current_loop.h" 20 #include "media/base/bind_to_current_loop.h"
21 #include "media/base/limits.h" 21 #include "media/base/limits.h"
22 #include "media/base/video_frame.h" 22 #include "media/base/video_frame.h"
23 23
24 namespace content { 24 namespace content {
25 25
26 namespace {
27
28 // This is called on an unknown thread when the VideoFrame destructor executes.
29 // As of this writing, this callback mechanism is the only interface in
30 // VideoFrame to provide the final value for |release_sync_token|.
31 // VideoCaptureImpl::DidFinishConsumingFrame() will read the value saved here,
32 // and pass it back to the IO thread to pass back to the host via the
33 // BufferReady IPC.
34 void SaveReleaseSyncToken(gpu::SyncToken* sync_token_storage,
35 const gpu::SyncToken& release_sync_token) {
36 *sync_token_storage = release_sync_token;
37 }
38
39 } // namespace
40
41 // A holder of a memory-backed buffer and accessors to it. 26 // A holder of a memory-backed buffer and accessors to it.
42 class VideoCaptureImpl::ClientBuffer 27 class VideoCaptureImpl::ClientBuffer
43 : public base::RefCountedThreadSafe<ClientBuffer> { 28 : public base::RefCountedThreadSafe<ClientBuffer> {
44 public: 29 public:
45 ClientBuffer(scoped_ptr<base::SharedMemory> buffer, size_t buffer_size) 30 ClientBuffer(scoped_ptr<base::SharedMemory> buffer, size_t buffer_size)
46 : buffer_(buffer.Pass()), buffer_size_(buffer_size) {} 31 : buffer_(buffer.Pass()), buffer_size_(buffer_size) {}
47 32
48 base::SharedMemory* buffer() const { return buffer_.get(); } 33 base::SharedMemory* buffer() const { return buffer_.get(); }
49 size_t buffer_size() const { return buffer_size_; } 34 size_t buffer_size() const { return buffer_size_; }
50 35
(...skipping 254 matching lines...) Expand 10 before | Expand all | Expand 10 after
305 } 290 }
306 } 291 }
307 292
308 void VideoCaptureImpl::OnBufferReceived( 293 void VideoCaptureImpl::OnBufferReceived(
309 int buffer_id, 294 int buffer_id,
310 base::TimeTicks timestamp, 295 base::TimeTicks timestamp,
311 const base::DictionaryValue& metadata, 296 const base::DictionaryValue& metadata,
312 media::VideoPixelFormat pixel_format, 297 media::VideoPixelFormat pixel_format,
313 media::VideoFrame::StorageType storage_type, 298 media::VideoFrame::StorageType storage_type,
314 const gfx::Size& coded_size, 299 const gfx::Size& coded_size,
315 const gfx::Rect& visible_rect, 300 const gfx::Rect& visible_rect) {
316 const gpu::MailboxHolder& mailbox_holder) {
317 DCHECK(io_task_runner_->BelongsToCurrentThread()); 301 DCHECK(io_task_runner_->BelongsToCurrentThread());
318 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_) { 302 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ ||
303 pixel_format != media::PIXEL_FORMAT_I420 ||
304 (storage_type != media::VideoFrame::STORAGE_SHMEM &&
305 storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) {
306 // Crash in debug builds since the host should not have provided a buffer
307 // with an unsupported pixel format or storage type.
308 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
309 DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM ||
310 storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS);
319 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 311 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
320 gpu::SyncToken(), -1.0)); 312 gpu::SyncToken(), -1.0));
321 return; 313 return;
322 } 314 }
323 if (first_frame_timestamp_.is_null()) 315 if (first_frame_timestamp_.is_null())
324 first_frame_timestamp_ = timestamp; 316 first_frame_timestamp_ = timestamp;
325 317
326 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc 318 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
327 TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived", 319 TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived",
328 TRACE_EVENT_SCOPE_THREAD, "timestamp", 320 TRACE_EVENT_SCOPE_THREAD, "timestamp",
329 timestamp.ToInternalValue(), "time_delta", 321 timestamp.ToInternalValue(), "time_delta",
330 (timestamp - first_frame_timestamp_).ToInternalValue()); 322 (timestamp - first_frame_timestamp_).ToInternalValue());
331 323
332 scoped_refptr<media::VideoFrame> frame; 324 scoped_refptr<media::VideoFrame> frame;
333 BufferFinishedCallback buffer_finished_callback; 325 BufferFinishedCallback buffer_finished_callback;
334 scoped_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken); 326 scoped_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken);
335 if (storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS) { 327 switch (storage_type) {
336 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); 328 case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: {
337 const auto& iter = client_buffer2s_.find(buffer_id); 329 const auto& iter = client_buffer2s_.find(buffer_id);
338 DCHECK(iter != client_buffer2s_.end()); 330 DCHECK(iter != client_buffer2s_.end());
339 scoped_refptr<ClientBuffer2> buffer = iter->second; 331 scoped_refptr<ClientBuffer2> buffer = iter->second;
340 const auto& handles = buffer->gpu_memory_buffer_handles(); 332 const auto& handles = buffer->gpu_memory_buffer_handles();
341 frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers( 333 frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers(
342 media::PIXEL_FORMAT_I420, 334 media::PIXEL_FORMAT_I420,
343 coded_size,
344 gfx::Rect(coded_size),
345 coded_size,
346 buffer->stride(media::VideoFrame::kYPlane),
347 buffer->stride(media::VideoFrame::kUPlane),
348 buffer->stride(media::VideoFrame::kVPlane),
349 buffer->data(media::VideoFrame::kYPlane),
350 buffer->data(media::VideoFrame::kUPlane),
351 buffer->data(media::VideoFrame::kVPlane),
352 handles[media::VideoFrame::kYPlane],
353 handles[media::VideoFrame::kUPlane],
354 handles[media::VideoFrame::kVPlane],
355 timestamp - first_frame_timestamp_);
356 DCHECK(frame);
357 buffer_finished_callback = media::BindToCurrentLoop(
358 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2,
359 weak_factory_.GetWeakPtr(), buffer_id, buffer));
360 } else {
361 scoped_refptr<ClientBuffer> buffer;
362 if (storage_type == media::VideoFrame::STORAGE_OPAQUE) {
363 DCHECK(mailbox_holder.mailbox.Verify());
364 DCHECK_EQ(media::PIXEL_FORMAT_ARGB, pixel_format);
365 frame = media::VideoFrame::WrapNativeTexture(
366 pixel_format,
367 mailbox_holder,
368 base::Bind(&SaveReleaseSyncToken, release_sync_token.get()),
369 coded_size, 335 coded_size,
370 gfx::Rect(coded_size), 336 gfx::Rect(coded_size),
371 coded_size, 337 coded_size,
338 buffer->stride(media::VideoFrame::kYPlane),
339 buffer->stride(media::VideoFrame::kUPlane),
340 buffer->stride(media::VideoFrame::kVPlane),
341 buffer->data(media::VideoFrame::kYPlane),
342 buffer->data(media::VideoFrame::kUPlane),
343 buffer->data(media::VideoFrame::kVPlane),
344 handles[media::VideoFrame::kYPlane],
345 handles[media::VideoFrame::kUPlane],
346 handles[media::VideoFrame::kVPlane],
372 timestamp - first_frame_timestamp_); 347 timestamp - first_frame_timestamp_);
348 buffer_finished_callback = media::BindToCurrentLoop(
349 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2,
350 weak_factory_.GetWeakPtr(), buffer_id, buffer));
351 break;
373 } 352 }
374 else { 353 case media::VideoFrame::STORAGE_SHMEM: {
375 DCHECK(storage_type == media::VideoFrame::STORAGE_UNOWNED_MEMORY ||
376 storage_type == media::VideoFrame::STORAGE_SHMEM);
377 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
378 const auto& iter = client_buffers_.find(buffer_id); 354 const auto& iter = client_buffers_.find(buffer_id);
379 DCHECK(iter != client_buffers_.end()); 355 DCHECK(iter != client_buffers_.end());
380 buffer = iter->second; 356 const scoped_refptr<ClientBuffer> buffer = iter->second;
381 frame = media::VideoFrame::WrapExternalSharedMemory( 357 frame = media::VideoFrame::WrapExternalSharedMemory(
382 pixel_format, 358 pixel_format,
383 coded_size, 359 coded_size,
384 visible_rect, 360 visible_rect,
385 gfx::Size(visible_rect.width(), 361 gfx::Size(visible_rect.width(),
386 visible_rect.height()), 362 visible_rect.height()),
387 reinterpret_cast<uint8*>(buffer->buffer()->memory()), 363 reinterpret_cast<uint8*>(buffer->buffer()->memory()),
388 buffer->buffer_size(), 364 buffer->buffer_size(),
389 buffer->buffer()->handle(), 365 buffer->buffer()->handle(),
390 0 /* shared_memory_offset */, 366 0 /* shared_memory_offset */,
391 timestamp - first_frame_timestamp_); 367 timestamp - first_frame_timestamp_);
368 buffer_finished_callback = media::BindToCurrentLoop(
369 base::Bind(&VideoCaptureImpl::OnClientBufferFinished,
370 weak_factory_.GetWeakPtr(), buffer_id, buffer));
371 break;
392 } 372 }
393 DCHECK(frame); 373 default:
394 buffer_finished_callback = media::BindToCurrentLoop( 374 NOTREACHED();
395 base::Bind(&VideoCaptureImpl::OnClientBufferFinished, 375 break;
396 weak_factory_.GetWeakPtr(), buffer_id, buffer));
397 } 376 }
377 DCHECK(frame);
378
398 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, 379 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME,
399 timestamp); 380 timestamp);
400 frame->AddDestructionObserver( 381 frame->AddDestructionObserver(
401 base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(), 382 base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(),
402 base::Passed(&release_sync_token), buffer_finished_callback)); 383 base::Passed(&release_sync_token), buffer_finished_callback));
403 384
404 frame->metadata()->MergeInternalValuesFrom(metadata); 385 frame->metadata()->MergeInternalValuesFrom(metadata);
405 386
406 for (const auto& client : clients_) 387 for (const auto& client : clients_)
407 client.second.deliver_frame_cb.Run(frame, timestamp); 388 client.second.deliver_frame_cb.Run(frame, timestamp);
(...skipping 158 matching lines...) Expand 10 before | Expand all | Expand 10 after
566 double consumer_resource_utilization = -1.0; 547 double consumer_resource_utilization = -1.0;
567 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION, 548 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION,
568 &consumer_resource_utilization)) { 549 &consumer_resource_utilization)) {
569 consumer_resource_utilization = -1.0; 550 consumer_resource_utilization = -1.0;
570 } 551 }
571 552
572 callback_to_io_thread.Run(*release_sync_token, consumer_resource_utilization); 553 callback_to_io_thread.Run(*release_sync_token, consumer_resource_utilization);
573 } 554 }
574 555
575 } // namespace content 556 } // namespace content
OLDNEW
« no previous file with comments | « content/renderer/media/video_capture_impl.h ('k') | content/renderer/media/video_capture_impl_unittest.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698