OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 // | 4 // |
5 // Notes about usage of this object by VideoCaptureImplManager. | 5 // Notes about usage of this object by VideoCaptureImplManager. |
6 // | 6 // |
7 // VideoCaptureImplManager accesses this object by using Unretained() | 7 // VideoCaptureImplManager accesses this object by using Unretained() |
8 // bindings and tasks on the IO thread. It is therefore important that | 8 // bindings and tasks on the IO thread. It is therefore important that |
9 // VideoCaptureImpl never posts tasks to itself. All operations must be | 9 // VideoCaptureImpl never posts tasks to itself. All operations must be |
10 // synchronous. | 10 // synchronous. |
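
The note above describes a specific lifetime contract: VideoCaptureImplManager binds this object with base::Unretained() and posts to it on the IO thread, which is only safe because VideoCaptureImpl never posts follow-up tasks back to itself. The standalone sketch below illustrates that reasoning with a toy task queue; PostToIOThread and RawCapture are hypothetical stand-ins, not the real Chromium types.

#include <deque>
#include <functional>
#include <memory>

std::deque<std::function<void()>> g_io_queue;  // Stand-in for the IO thread's task queue.

void PostToIOThread(std::function<void()> task) {
  g_io_queue.push_back(std::move(task));
}

class RawCapture {
 public:
  void Start() { /* Must complete synchronously; never call PostToIOThread(). */ }
  void Stop()  { /* Same: fully synchronous, no re-posting to |this|. */ }
};

int main() {
  auto capture = std::make_unique<RawCapture>();
  RawCapture* unretained = capture.get();  // Analogous to base::Unretained().

  PostToIOThread([unretained] { unretained->Start(); });
  PostToIOThread([unretained] { unretained->Stop(); });

  // Deletion is sequenced after the last posted task. Because Start()/Stop()
  // never post further tasks referencing the object, nothing can touch the
  // raw pointer after this point.
  PostToIOThread([raw = capture.release()] { delete raw; });

  while (!g_io_queue.empty()) {  // Drain, as the IO thread's loop would.
    g_io_queue.front()();
    g_io_queue.pop_front();
  }
  return 0;
}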
11 | 11 |
12 #include "content/renderer/media/video_capture_impl.h" | 12 #include "content/renderer/media/video_capture_impl.h" |
13 | 13 |
14 #include "base/bind.h" | 14 #include "base/bind.h" |
15 #include "base/stl_util.h" | 15 #include "base/stl_util.h" |
16 #include "base/thread_task_runner_handle.h" | 16 #include "base/thread_task_runner_handle.h" |
17 #include "content/child/child_process.h" | 17 #include "content/child/child_process.h" |
18 #include "content/common/gpu/client/gpu_memory_buffer_impl.h" | 18 #include "content/common/gpu/client/gpu_memory_buffer_impl.h" |
19 #include "content/common/media/video_capture_messages.h" | 19 #include "content/common/media/video_capture_messages.h" |
20 #include "media/base/bind_to_current_loop.h" | 20 #include "media/base/bind_to_current_loop.h" |
21 #include "media/base/limits.h" | 21 #include "media/base/limits.h" |
22 #include "media/base/video_frame.h" | 22 #include "media/base/video_frame.h" |
23 | 23 |
24 namespace content { | 24 namespace content { |
25 | 25 |
26 namespace { | 26 namespace { |
27 | 27 |
28 // This is called on an unknown thread when the VideoFrame destructor executes. | 28 // This is called on an unknown thread when the VideoFrame destructor executes. |
29 // As of this writing, this callback mechanism is the only interface in | 29 // As of this writing, this callback mechanism is the only interface in |
30 // VideoFrame to provide the final value for |release_sync_point|. | 30 // VideoFrame to provide the final value for |release_sync_token|. |
31 // VideoCaptureImpl::DidFinishConsumingFrame() will read the value saved here, | 31 // VideoCaptureImpl::DidFinishConsumingFrame() will read the value saved here, |
32 // and pass it back to the IO thread to pass back to the host via the | 32 // and pass it back to the IO thread to pass back to the host via the |
33 // BufferReady IPC. | 33 // BufferReady IPC. |
34 void SaveReleaseSyncPoint(uint32* storage, uint32 release_sync_point) { | 34 void SaveReleaseSyncToken(gpu::SyncToken* sync_token_storage, |
35 *storage = release_sync_point; | 35 const gpu::SyncToken& release_sync_token) { |
| 36 *sync_token_storage = release_sync_token; |
36 } | 37 } |
37 | 38 |
38 } // namespace | 39 } // namespace |
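
SaveReleaseSyncToken() above exists only to stash the consumer-supplied release token in heap storage that outlives the frame, so DidFinishConsumingFrame() can read it later and pass it back over IPC. A minimal standalone sketch of that save-then-read-and-delete pattern, using the hypothetical names FakeSyncToken and SaveToken rather than the real gpu::SyncToken plumbing:

#include <cassert>
#include <functional>

struct FakeSyncToken {
  unsigned value = 0;
};

// Mirrors SaveReleaseSyncToken(): the consumer writes the final token into
// storage owned by whoever reads it later.
void SaveToken(FakeSyncToken* storage, const FakeSyncToken& released) {
  *storage = released;
}

int main() {
  // Heap storage outlives the frame; ownership passes to the eventual reader.
  FakeSyncToken* storage = new FakeSyncToken;
  std::function<void(const FakeSyncToken&)> release_cb =
      [storage](const FakeSyncToken& token) { SaveToken(storage, token); };

  release_cb(FakeSyncToken{42});  // Consumer reports the release token...

  FakeSyncToken read = *storage;  // ...and the destruction observer reads it
  delete storage;                 // and frees the storage exactly once.
  assert(read.value == 42);
  return 0;
}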
39 | 40 |
40 // A holder of a memory-backed buffer and accessors to it. | 41 // A holder of a memory-backed buffer and accessors to it. |
41 class VideoCaptureImpl::ClientBuffer | 42 class VideoCaptureImpl::ClientBuffer |
42 : public base::RefCountedThreadSafe<ClientBuffer> { | 43 : public base::RefCountedThreadSafe<ClientBuffer> { |
43 public: | 44 public: |
44 ClientBuffer(scoped_ptr<base::SharedMemory> buffer, size_t buffer_size) | 45 ClientBuffer(scoped_ptr<base::SharedMemory> buffer, size_t buffer_size) |
45 : buffer_(buffer.Pass()), buffer_size_(buffer_size) {} | 46 : buffer_(buffer.Pass()), buffer_size_(buffer_size) {} |
(...skipping 261 matching lines...)
307 int buffer_id, | 308 int buffer_id, |
308 base::TimeTicks timestamp, | 309 base::TimeTicks timestamp, |
309 const base::DictionaryValue& metadata, | 310 const base::DictionaryValue& metadata, |
310 media::VideoPixelFormat pixel_format, | 311 media::VideoPixelFormat pixel_format, |
311 media::VideoFrame::StorageType storage_type, | 312 media::VideoFrame::StorageType storage_type, |
312 const gfx::Size& coded_size, | 313 const gfx::Size& coded_size, |
313 const gfx::Rect& visible_rect, | 314 const gfx::Rect& visible_rect, |
314 const std::vector<gpu::MailboxHolder>& mailbox_holders) { | 315 const std::vector<gpu::MailboxHolder>& mailbox_holders) { |
315 DCHECK(io_task_runner_->BelongsToCurrentThread()); | 316 DCHECK(io_task_runner_->BelongsToCurrentThread()); |
316 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_) { | 317 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_) { |
317 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 0, -1.0)); | 318 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, |
| 319 gpu::SyncToken(), -1.0)); |
318 return; | 320 return; |
319 } | 321 } |
320 if (first_frame_timestamp_.is_null()) | 322 if (first_frame_timestamp_.is_null()) |
321 first_frame_timestamp_ = timestamp; | 323 first_frame_timestamp_ = timestamp; |
322 | 324 |
323 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc | 325 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc |
324 TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived", | 326 TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived", |
325 TRACE_EVENT_SCOPE_THREAD, "timestamp", | 327 TRACE_EVENT_SCOPE_THREAD, "timestamp", |
326 timestamp.ToInternalValue(), "time_delta", | 328 timestamp.ToInternalValue(), "time_delta", |
327 (timestamp - first_frame_timestamp_).ToInternalValue()); | 329 (timestamp - first_frame_timestamp_).ToInternalValue()); |
328 | 330 |
329 scoped_refptr<media::VideoFrame> frame; | 331 scoped_refptr<media::VideoFrame> frame; |
330 base::Callback<void(uint32, double)> buffer_finished_callback; | 332 BufferFinishedCallback buffer_finished_callback; |
331 uint32* release_sync_point_storage = new uint32(0); | 333 gpu::SyncToken* release_sync_token_storage = new gpu::SyncToken; |
332 if (mailbox_holders.empty()) { | 334 if (mailbox_holders.empty()) { |
333 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); | 335 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); |
334 const auto& iter = client_buffers_.find(buffer_id); | 336 const auto& iter = client_buffers_.find(buffer_id); |
335 DCHECK(iter != client_buffers_.end()); | 337 DCHECK(iter != client_buffers_.end()); |
336 const scoped_refptr<ClientBuffer> buffer = iter->second; | 338 const scoped_refptr<ClientBuffer> buffer = iter->second; |
337 frame = media::VideoFrame::WrapExternalSharedMemory( | 339 frame = media::VideoFrame::WrapExternalSharedMemory( |
338 pixel_format, | 340 pixel_format, |
339 coded_size, | 341 coded_size, |
340 visible_rect, | 342 visible_rect, |
341 gfx::Size(visible_rect.width(), visible_rect.height()), | 343 gfx::Size(visible_rect.width(), visible_rect.height()), |
(...skipping 10 matching lines...)
352 for (const auto& mailbox_holder : mailbox_holders) | 354 for (const auto& mailbox_holder : mailbox_holders) |
353 DCHECK(mailbox_holder.mailbox.Verify()); | 355 DCHECK(mailbox_holder.mailbox.Verify()); |
354 DCHECK(mailbox_holders.size() == 1u || mailbox_holders.size() == 3u); | 356 DCHECK(mailbox_holders.size() == 1u || mailbox_holders.size() == 3u); |
355 #endif | 357 #endif |
356 | 358 |
357 scoped_refptr<ClientBuffer2> buffer; | 359 scoped_refptr<ClientBuffer2> buffer; |
358 if (mailbox_holders.size() == | 360 if (mailbox_holders.size() == |
359 media::VideoFrame::NumPlanes(media::PIXEL_FORMAT_ARGB)) { | 361 media::VideoFrame::NumPlanes(media::PIXEL_FORMAT_ARGB)) { |
360 DCHECK_EQ(media::PIXEL_FORMAT_ARGB, pixel_format); | 362 DCHECK_EQ(media::PIXEL_FORMAT_ARGB, pixel_format); |
361 frame = media::VideoFrame::WrapNativeTexture( | 363 frame = media::VideoFrame::WrapNativeTexture( |
362 pixel_format, | 364 pixel_format, mailbox_holders[0], |
363 mailbox_holders[0], | 365 base::Bind(&SaveReleaseSyncToken, release_sync_token_storage), |
364 base::Bind(&SaveReleaseSyncPoint, release_sync_point_storage), | 366 coded_size, gfx::Rect(coded_size), coded_size, |
365 coded_size, | |
366 gfx::Rect(coded_size), | |
367 coded_size, | |
368 timestamp - first_frame_timestamp_); | 367 timestamp - first_frame_timestamp_); |
369 } else if (mailbox_holders.size() == | 368 } else if (mailbox_holders.size() == |
370 media::VideoFrame::NumPlanes(media::PIXEL_FORMAT_I420)) { | 369 media::VideoFrame::NumPlanes(media::PIXEL_FORMAT_I420)) { |
371 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); | 370 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); |
372 const auto& iter = client_buffer2s_.find(buffer_id); | 371 const auto& iter = client_buffer2s_.find(buffer_id); |
373 DCHECK(iter != client_buffer2s_.end()); | 372 DCHECK(iter != client_buffer2s_.end()); |
374 buffer = iter->second; | 373 buffer = iter->second; |
375 frame = media::VideoFrame::WrapYUV420NativeTextures( | 374 frame = media::VideoFrame::WrapYUV420NativeTextures( |
376 mailbox_holders[media::VideoFrame::kYPlane], | 375 mailbox_holders[media::VideoFrame::kYPlane], |
377 mailbox_holders[media::VideoFrame::kUPlane], | 376 mailbox_holders[media::VideoFrame::kUPlane], |
378 mailbox_holders[media::VideoFrame::kVPlane], | 377 mailbox_holders[media::VideoFrame::kVPlane], |
379 base::Bind(&SaveReleaseSyncPoint, release_sync_point_storage), | 378 base::Bind(&SaveReleaseSyncToken, release_sync_token_storage), |
380 coded_size, | 379 coded_size, gfx::Rect(coded_size), coded_size, |
381 gfx::Rect(coded_size), | |
382 coded_size, | |
383 timestamp - first_frame_timestamp_); | 380 timestamp - first_frame_timestamp_); |
384 } | 381 } |
385 buffer_finished_callback = media::BindToCurrentLoop( | 382 buffer_finished_callback = media::BindToCurrentLoop( |
386 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2, | 383 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2, |
387 weak_factory_.GetWeakPtr(), buffer_id, buffer)); | 384 weak_factory_.GetWeakPtr(), buffer_id, buffer)); |
388 } | 385 } |
389 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, | 386 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, |
390 timestamp); | 387 timestamp); |
391 frame->AddDestructionObserver( | 388 frame->AddDestructionObserver( |
392 base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(), | 389 base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(), |
393 release_sync_point_storage, buffer_finished_callback)); | 390 release_sync_token_storage, buffer_finished_callback)); |
394 | 391 |
395 frame->metadata()->MergeInternalValuesFrom(metadata); | 392 frame->metadata()->MergeInternalValuesFrom(metadata); |
396 | 393 |
397 for (const auto& client : clients_) | 394 for (const auto& client : clients_) |
398 client.second.deliver_frame_cb.Run(frame, timestamp); | 395 client.second.deliver_frame_cb.Run(frame, timestamp); |
399 } | 396 } |
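
The tail of OnBufferReceived() above relies on a pattern worth spelling out: the frame's destruction observer may fire on any thread, so the "buffer finished" handler is wrapped with media::BindToCurrentLoop() so that it re-posts to the IO thread before the BufferReady IPC is sent. A condensed, standalone sketch of that trampoline; Frame, the queue, and the printed message are hypothetical stand-ins for media::VideoFrame, the IO task runner, and the IPC:

#include <deque>
#include <functional>
#include <iostream>

std::deque<std::function<void()>> g_io_queue;  // Stand-in for the IO thread.

struct Frame {
  std::function<void()> destruction_observer;
  ~Frame() {
    // May run on any thread in the real code.
    if (destruction_observer)
      destruction_observer();
  }
};

int main() {
  const int buffer_id = 7;
  {
    Frame frame;
    // Equivalent of media::BindToCurrentLoop(): wrap the handler so the real
    // work only runs once re-posted to the IO queue.
    frame.destruction_observer = [buffer_id] {
      g_io_queue.push_back([buffer_id] {
        std::cout << "BufferReady(buffer_id=" << buffer_id << ")\n";
      });
    };
  }  // Frame destroyed here; in the real code this can be a consumer thread.

  while (!g_io_queue.empty()) {  // The IO thread drains and sends the IPC.
    g_io_queue.front()();
    g_io_queue.pop_front();
  }
  return 0;
}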
400 | 397 |
401 void VideoCaptureImpl::OnClientBufferFinished( | 398 void VideoCaptureImpl::OnClientBufferFinished( |
402 int buffer_id, | 399 int buffer_id, |
403 const scoped_refptr<ClientBuffer>& /* ignored_buffer */, | 400 const scoped_refptr<ClientBuffer>& /* ignored_buffer */, |
404 uint32 release_sync_point, | 401 const gpu::SyncToken& release_sync_token, |
405 double consumer_resource_utilization) { | 402 double consumer_resource_utilization) { |
406 DCHECK(io_task_runner_->BelongsToCurrentThread()); | 403 DCHECK(io_task_runner_->BelongsToCurrentThread()); |
407 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, | 404 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, |
408 release_sync_point, | 405 release_sync_token, |
409 consumer_resource_utilization)); | 406 consumer_resource_utilization)); |
410 } | 407 } |
411 void VideoCaptureImpl::OnClientBufferFinished2( | 408 void VideoCaptureImpl::OnClientBufferFinished2( |
412 int buffer_id, | 409 int buffer_id, |
413 const scoped_refptr<ClientBuffer2>& gpu_memory_buffer /* ignored_buffer */, | 410 const scoped_refptr<ClientBuffer2>& gpu_memory_buffer /* ignored_buffer */, |
414 uint32 release_sync_point, | 411 const gpu::SyncToken& release_sync_token, |
415 double consumer_resource_utilization) { | 412 double consumer_resource_utilization) { |
416 OnClientBufferFinished(buffer_id, scoped_refptr<ClientBuffer>(), | 413 OnClientBufferFinished(buffer_id, scoped_refptr<ClientBuffer>(), |
417 release_sync_point, consumer_resource_utilization); | 414 release_sync_token, consumer_resource_utilization); |
418 } | 415 } |
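
Both finished callbacks above converge on a single BufferReady reply, and the same message is sent immediately with a default gpu::SyncToken and -1.0 utilization when the frame is dropped (the early return near the top of OnBufferReceived()). A plain-function sketch of those two paths; SendBufferReady, ReturnBuffer, and Token are hypothetical simplifications of the real IPC and gpu::SyncToken:

#include <cstdio>

struct Token {
  unsigned value = 0;
};

void SendBufferReady(int device_id, int buffer_id, const Token& token,
                     double utilization) {
  std::printf("BufferReady(device=%d, buffer=%d, token=%u, util=%.1f)\n",
              device_id, buffer_id, token.value, utilization);
}

void ReturnBuffer(int device_id, int buffer_id, bool frame_was_delivered,
                  const Token& release_token, double utilization) {
  if (!frame_was_delivered) {
    // Mirrors the early return in OnBufferReceived(): nothing was consumed,
    // so there is no token to wait on and no utilization to report.
    SendBufferReady(device_id, buffer_id, Token(), -1.0);
    return;
  }
  SendBufferReady(device_id, buffer_id, release_token, utilization);
}

int main() {
  ReturnBuffer(1, 5, false, Token(), 0.0);  // Dropped while suspended.
  ReturnBuffer(1, 5, true, Token{7}, 0.6);  // Consumed by a client.
  return 0;
}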
419 | 416 |
420 void VideoCaptureImpl::OnStateChanged(VideoCaptureState state) { | 417 void VideoCaptureImpl::OnStateChanged(VideoCaptureState state) { |
421 DCHECK(io_task_runner_->BelongsToCurrentThread()); | 418 DCHECK(io_task_runner_->BelongsToCurrentThread()); |
422 | 419 |
423 switch (state) { | 420 switch (state) { |
424 case VIDEO_CAPTURE_STATE_STARTED: | 421 case VIDEO_CAPTURE_STATE_STARTED: |
425 // Camera has started in the browser process. Since we have already | 422 // Camera has started in the browser process. Since we have already |
426 // told all clients that we have started, there's nothing to do. | 423 // told all clients that we have started, there's nothing to do. |
427 break; | 424 break; |
(...skipping 115 matching lines...)
543 it->second.state_update_cb.Run(VIDEO_CAPTURE_STATE_STOPPED); | 540 it->second.state_update_cb.Run(VIDEO_CAPTURE_STATE_STOPPED); |
544 clients->erase(it); | 541 clients->erase(it); |
545 found = true; | 542 found = true; |
546 } | 543 } |
547 return found; | 544 return found; |
548 } | 545 } |
549 | 546 |
550 // static | 547 // static |
551 void VideoCaptureImpl::DidFinishConsumingFrame( | 548 void VideoCaptureImpl::DidFinishConsumingFrame( |
552 const media::VideoFrameMetadata* metadata, | 549 const media::VideoFrameMetadata* metadata, |
553 uint32* release_sync_point_storage, | 550 gpu::SyncToken* release_sync_token_storage, |
554 const base::Callback<void(uint32, double)>& callback_to_io_thread) { | 551 const BufferFinishedCallback& callback_to_io_thread) { |
555 // Note: This function may be called on any thread by the VideoFrame | 552 // Note: This function may be called on any thread by the VideoFrame |
556 // destructor. |metadata| is still valid for read-access at this point. | 553 // destructor. |metadata| is still valid for read-access at this point. |
557 | 554 gpu::SyncToken release_sync_token; |
558 uint32 release_sync_point = 0u; | 555 if (release_sync_token_storage) { |
559 if (release_sync_point_storage) { | 556 release_sync_token = *release_sync_token_storage; |
560 release_sync_point = *release_sync_point_storage; | 557 delete release_sync_token_storage; |
561 delete release_sync_point_storage; | |
562 } | 558 } |
563 | 559 |
564 double consumer_resource_utilization = -1.0; | 560 double consumer_resource_utilization = -1.0; |
565 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION, | 561 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION, |
566 &consumer_resource_utilization)) { | 562 &consumer_resource_utilization)) { |
567 consumer_resource_utilization = -1.0; | 563 consumer_resource_utilization = -1.0; |
568 } | 564 } |
569 | 565 |
570 callback_to_io_thread.Run(release_sync_point, consumer_resource_utilization); | 566 callback_to_io_thread.Run(release_sync_token, consumer_resource_utilization); |
571 } | 567 } |
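
DidFinishConsumingFrame() has a small but easy-to-miss contract: it takes ownership of the optional heap-allocated token storage, falls back to a default token and -1.0 utilization when values are absent, and forwards both to the IO-thread callback. A compact sketch under those assumptions, with Metadata and Token as hypothetical stand-ins for media::VideoFrameMetadata and gpu::SyncToken:

#include <functional>
#include <map>
#include <string>

struct Token {
  unsigned value = 0;
};
using Metadata = std::map<std::string, double>;

void DidFinishConsumingFrameSketch(
    const Metadata& metadata,
    Token* saved_token,  // May be null; ownership transfers here.
    const std::function<void(const Token&, double)>& callback_to_io_thread) {
  Token release_token;  // Default: nothing to release.
  if (saved_token) {
    release_token = *saved_token;
    delete saved_token;
  }
  double utilization = -1.0;  // Default: utilization unknown.
  const auto it = metadata.find("resource_utilization");
  if (it != metadata.end())
    utilization = it->second;
  callback_to_io_thread(release_token, utilization);
}

int main() {
  DidFinishConsumingFrameSketch(
      {{"resource_utilization", 0.5}}, new Token{3},
      [](const Token& token, double utilization) {
        // In the real code this is the BindToCurrentLoop()-wrapped callback
        // that ends up sending VideoCaptureHostMsg_BufferReady on IO.
        (void)token;
        (void)utilization;
      });
  return 0;
}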
572 | 568 |
573 } // namespace content | 569 } // namespace content |