OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 // | 4 // |
5 // Notes about usage of this object by VideoCaptureImplManager. | 5 // Notes about usage of this object by VideoCaptureImplManager. |
6 // | 6 // |
7 // VideoCaptureImplManager accesses this object using Unretained() | 7 // VideoCaptureImplManager accesses this object using Unretained() |
8 // bindings and tasks on the IO thread. It is therefore important that | 8 // bindings and tasks on the IO thread. It is therefore important that |
9 // VideoCaptureImpl never posts tasks to itself. All operations must be | 9 // VideoCaptureImpl never posts tasks to itself. All operations must be |
10 // synchronous. | 10 // synchronous. |
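For orientation, the contract described above means every call into VideoCaptureImpl arrives as a task posted to the IO thread with an unretained pointer, so the object must finish its work inline rather than re-posting to itself. The sketch below is illustrative only; the variable names and the bound method are placeholders, not code from this CL:

    // Hypothetical caller-side pattern (manager code, simplified): the pointer
    // is not ref-counted across threads, so the bound method has to complete
    // synchronously on the IO thread.
    io_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&VideoCaptureImpl::StopCapture,
                   base::Unretained(video_capture_impl), client_id));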
(...skipping 35 matching lines...)
46 const size_t buffer_size_; | 46 const size_t buffer_size_; |
47 | 47 |
48 DISALLOW_COPY_AND_ASSIGN(ClientBuffer); | 48 DISALLOW_COPY_AND_ASSIGN(ClientBuffer); |
49 }; | 49 }; |
50 | 50 |
51 // A holder of a GpuMemoryBuffer-backed buffer, Map()ed on ctor and Unmap()ed on | 51 // A holder of a GpuMemoryBuffer-backed buffer, Map()ed on ctor and Unmap()ed on |
52 // dtor. Creates and owns GpuMemoryBuffer instances. | 52 // dtor. Creates and owns GpuMemoryBuffer instances. |
53 class VideoCaptureImpl::ClientBuffer2 | 53 class VideoCaptureImpl::ClientBuffer2 |
54 : public base::RefCountedThreadSafe<ClientBuffer2> { | 54 : public base::RefCountedThreadSafe<ClientBuffer2> { |
55 public: | 55 public: |
56 ClientBuffer2( | 56 ClientBuffer2(const std::vector<gfx::GpuMemoryBufferHandle>& client_handles, |
57 const std::vector<gfx::GpuMemoryBufferHandle>& client_handles, | 57 const gfx::Size& size, |
58 const gfx::Size& size) | 58 media::VideoPixelFormat format) |
59 : handles_(client_handles), | 59 : handles_(client_handles) { |
60 size_(size) { | 60 DCHECK(format == media::PIXEL_FORMAT_I420 || |
61 const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; | 61 format == media::PIXEL_FORMAT_Y8 || |
| 62 format == media::PIXEL_FORMAT_Y16); |
62 DCHECK_EQ(handles_.size(), media::VideoFrame::NumPlanes(format)); | 63 DCHECK_EQ(handles_.size(), media::VideoFrame::NumPlanes(format)); |
63 for (size_t i = 0; i < handles_.size(); ++i) { | 64 for (size_t i = 0; i < handles_.size(); ++i) { |
64 const size_t width = media::VideoFrame::Columns(i, format, size_.width()); | 65 const size_t width = media::VideoFrame::Columns(i, format, size.width()); |
65 const size_t height = media::VideoFrame::Rows(i, format, size_.height()); | 66 const size_t height = media::VideoFrame::Rows(i, format, size.height()); |
66 buffers_.push_back(gpu::GpuMemoryBufferImpl::CreateFromHandle( | 67 buffers_.push_back(gpu::GpuMemoryBufferImpl::CreateFromHandle( |
67 handles_[i], gfx::Size(width, height), gfx::BufferFormat::R_8, | 68 handles_[i], gfx::Size(width, height), |
| 69 media::VideoFrame::BufferFormat(format), |
68 gfx::BufferUsage::GPU_READ_CPU_READ_WRITE, | 70 gfx::BufferUsage::GPU_READ_CPU_READ_WRITE, |
69 base::Bind(&ClientBuffer2::DestroyGpuMemoryBuffer, | 71 base::Bind(&ClientBuffer2::DestroyGpuMemoryBuffer, |
70 base::Unretained(this)))); | 72 base::Unretained(this)))); |
71 bool rv = buffers_[i]->Map(); | 73 bool rv = buffers_[i]->Map(); |
72 DCHECK(rv); | 74 DCHECK(rv); |
73 data_[i] = reinterpret_cast<uint8_t*>(buffers_[i]->memory(0u)); | 75 data_[i] = reinterpret_cast<uint8_t*>(buffers_[i]->memory(0u)); |
74 strides_[i] = width; | 76 strides_[i] = width; |
75 } | 77 } |
76 } | 78 } |
77 | 79 |
78 uint8_t* data(int plane) const { return data_[plane]; } | 80 uint8_t* data(int plane) const { return data_[plane]; } |
79 int32_t stride(int plane) const { return strides_[plane]; } | 81 int32_t stride(int plane) const { return strides_[plane]; } |
80 std::vector<gfx::GpuMemoryBufferHandle> gpu_memory_buffer_handles() { | 82 std::vector<gfx::GpuMemoryBufferHandle> gpu_memory_buffer_handles() { |
81 return handles_; | 83 return handles_; |
82 } | 84 } |
83 | 85 |
84 private: | 86 private: |
85 friend class base::RefCountedThreadSafe<ClientBuffer2>; | 87 friend class base::RefCountedThreadSafe<ClientBuffer2>; |
86 | 88 |
87 virtual ~ClientBuffer2() { | 89 virtual ~ClientBuffer2() { |
88 for (auto* buffer : buffers_) | 90 for (auto* buffer : buffers_) |
89 buffer->Unmap(); | 91 buffer->Unmap(); |
90 } | 92 } |
91 | 93 |
92 void DestroyGpuMemoryBuffer(const gpu::SyncToken& sync_token) {} | 94 void DestroyGpuMemoryBuffer(const gpu::SyncToken& sync_token) {} |
93 | 95 |
94 const std::vector<gfx::GpuMemoryBufferHandle> handles_; | 96 const std::vector<gfx::GpuMemoryBufferHandle> handles_; |
95 const gfx::Size size_; | |
96 ScopedVector<gfx::GpuMemoryBuffer> buffers_; | 97 ScopedVector<gfx::GpuMemoryBuffer> buffers_; |
97 uint8_t* data_[media::VideoFrame::kMaxPlanes]; | 98 uint8_t* data_[media::VideoFrame::kMaxPlanes]; |
98 int32_t strides_[media::VideoFrame::kMaxPlanes]; | 99 int32_t strides_[media::VideoFrame::kMaxPlanes]; |
99 | 100 |
100 DISALLOW_COPY_AND_ASSIGN(ClientBuffer2); | 101 DISALLOW_COPY_AND_ASSIGN(ClientBuffer2); |
101 }; | 102 }; |
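To make the plane geometry in the ClientBuffer2 constructor concrete, here is the expected layout for a 640x480 capture under the three formats the DCHECK allows (illustrative numbers only, following the usual I420/Y8/Y16 definitions):

    // PIXEL_FORMAT_I420 -> 3 planes/handles: Y 640x480, U 320x240, V 320x240
    // PIXEL_FORMAT_Y8   -> 1 plane/handle:   640x480, 1 byte per pixel
    // PIXEL_FORMAT_Y16  -> 1 plane/handle:   640x480, 2 bytes per pixel
    // media::VideoFrame::NumPlanes(format) must therefore equal handles_.size().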
102 | 103 |
103 VideoCaptureImpl::ClientInfo::ClientInfo() {} | 104 VideoCaptureImpl::ClientInfo::ClientInfo() {} |
104 VideoCaptureImpl::ClientInfo::ClientInfo(const ClientInfo& other) = default; | 105 VideoCaptureImpl::ClientInfo::ClientInfo(const ClientInfo& other) = default; |
105 VideoCaptureImpl::ClientInfo::~ClientInfo() {} | 106 VideoCaptureImpl::ClientInfo::~ClientInfo() {} |
(...skipping 155 matching lines...)
261 client_buffers_.insert(std::make_pair( | 262 client_buffers_.insert(std::make_pair( |
262 buffer_id, | 263 buffer_id, |
263 new ClientBuffer(std::move(shm), length))) | 264 new ClientBuffer(std::move(shm), length))) |
264 .second; | 265 .second; |
265 DCHECK(inserted); | 266 DCHECK(inserted); |
266 } | 267 } |
267 | 268 |
268 void VideoCaptureImpl::OnBufferCreated2( | 269 void VideoCaptureImpl::OnBufferCreated2( |
269 const std::vector<gfx::GpuMemoryBufferHandle>& handles, | 270 const std::vector<gfx::GpuMemoryBufferHandle>& handles, |
270 const gfx::Size& size, | 271 const gfx::Size& size, |
| 272 media::VideoPixelFormat format, |
271 int buffer_id) { | 273 int buffer_id) { |
272 DCHECK(io_task_runner_->BelongsToCurrentThread()); | 274 DCHECK(io_task_runner_->BelongsToCurrentThread()); |
273 | 275 |
274 // If the client calls StopCapture before the created buffer arrives, just | 276 // If the client calls StopCapture before the created buffer arrives, just |
275 // close this buffer and return. | 277 // close this buffer and return. |
276 if (state_ != VIDEO_CAPTURE_STATE_STARTED) | 278 if (state_ != VIDEO_CAPTURE_STATE_STARTED) |
277 return; | 279 return; |
278 | 280 |
279 const bool inserted = | 281 const bool inserted = |
280 client_buffer2s_.insert(std::make_pair(buffer_id, | 282 client_buffer2s_ |
281 new ClientBuffer2(handles, size))) | 283 .insert(std::make_pair(buffer_id, |
| 284 new ClientBuffer2(handles, size, format))) |
282 .second; | 285 .second; |
283 DCHECK(inserted); | 286 DCHECK(inserted); |
284 } | 287 } |
285 | 288 |
286 void VideoCaptureImpl::OnBufferDestroyed(int buffer_id) { | 289 void VideoCaptureImpl::OnBufferDestroyed(int buffer_id) { |
287 DCHECK(io_task_runner_->BelongsToCurrentThread()); | 290 DCHECK(io_task_runner_->BelongsToCurrentThread()); |
288 | 291 |
289 const auto& cb_iter = client_buffers_.find(buffer_id); | 292 const auto& cb_iter = client_buffers_.find(buffer_id); |
290 if (cb_iter != client_buffers_.end()) { | 293 if (cb_iter != client_buffers_.end()) { |
291 DCHECK(!cb_iter->second.get() || cb_iter->second->HasOneRef()) | 294 DCHECK(!cb_iter->second.get() || cb_iter->second->HasOneRef()) |
(...skipping 12 matching lines...)
304 void VideoCaptureImpl::OnBufferReceived( | 307 void VideoCaptureImpl::OnBufferReceived( |
305 int buffer_id, | 308 int buffer_id, |
306 base::TimeDelta timestamp, | 309 base::TimeDelta timestamp, |
307 const base::DictionaryValue& metadata, | 310 const base::DictionaryValue& metadata, |
308 media::VideoPixelFormat pixel_format, | 311 media::VideoPixelFormat pixel_format, |
309 media::VideoFrame::StorageType storage_type, | 312 media::VideoFrame::StorageType storage_type, |
310 const gfx::Size& coded_size, | 313 const gfx::Size& coded_size, |
311 const gfx::Rect& visible_rect) { | 314 const gfx::Rect& visible_rect) { |
312 DCHECK(io_task_runner_->BelongsToCurrentThread()); | 315 DCHECK(io_task_runner_->BelongsToCurrentThread()); |
313 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ || | 316 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ || |
314 pixel_format != media::PIXEL_FORMAT_I420 || | 317 (pixel_format != media::PIXEL_FORMAT_I420 && |
| 318 pixel_format != media::PIXEL_FORMAT_Y8 && |
| 319 pixel_format != media::PIXEL_FORMAT_Y16) || |
315 (storage_type != media::VideoFrame::STORAGE_SHMEM && | 320 (storage_type != media::VideoFrame::STORAGE_SHMEM && |
316 storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) { | 321 storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) { |
317 // Crash in debug builds since the host should not have provided a buffer | 322 // Crash in debug builds since the host should not have provided a buffer |
318 // with an unsupported pixel format or storage type. | 323 // with an unsupported pixel format or storage type. |
319 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); | 324 DCHECK(media::PIXEL_FORMAT_I420 == pixel_format || |
| 325 media::PIXEL_FORMAT_Y8 == pixel_format || |
| 326 media::PIXEL_FORMAT_Y16 == pixel_format); |
320 DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM || | 327 DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM || |
321 storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS); | 328 storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS); |
322 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, | 329 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, |
323 gpu::SyncToken(), -1.0)); | 330 gpu::SyncToken(), -1.0)); |
324 return; | 331 return; |
325 } | 332 } |
326 | 333 |
327 base::TimeTicks reference_time; | 334 base::TimeTicks reference_time; |
328 media::VideoFrameMetadata frame_metadata; | 335 media::VideoFrameMetadata frame_metadata; |
329 frame_metadata.MergeInternalValuesFrom(metadata); | 336 frame_metadata.MergeInternalValuesFrom(metadata); |
(...skipping 21 matching lines...)
351 | 358 |
352 scoped_refptr<media::VideoFrame> frame; | 359 scoped_refptr<media::VideoFrame> frame; |
353 BufferFinishedCallback buffer_finished_callback; | 360 BufferFinishedCallback buffer_finished_callback; |
354 std::unique_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken); | 361 std::unique_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken); |
355 switch (storage_type) { | 362 switch (storage_type) { |
356 case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: { | 363 case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: { |
357 const auto& iter = client_buffer2s_.find(buffer_id); | 364 const auto& iter = client_buffer2s_.find(buffer_id); |
358 DCHECK(iter != client_buffer2s_.end()); | 365 DCHECK(iter != client_buffer2s_.end()); |
359 scoped_refptr<ClientBuffer2> buffer = iter->second; | 366 scoped_refptr<ClientBuffer2> buffer = iter->second; |
360 const auto& handles = buffer->gpu_memory_buffer_handles(); | 367 const auto& handles = buffer->gpu_memory_buffer_handles(); |
361 frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers( | 368 frame = |
362 media::PIXEL_FORMAT_I420, coded_size, gfx::Rect(coded_size), | 369 (pixel_format == media::PIXEL_FORMAT_I420) |
363 coded_size, buffer->stride(media::VideoFrame::kYPlane), | 370 ? media::VideoFrame::WrapExternalYuvGpuMemoryBuffers( |
364 buffer->stride(media::VideoFrame::kUPlane), | 371 media::PIXEL_FORMAT_I420, coded_size, gfx::Rect(coded_size), |
365 buffer->stride(media::VideoFrame::kVPlane), | 372 coded_size, buffer->stride(media::VideoFrame::kYPlane), |
366 buffer->data(media::VideoFrame::kYPlane), | 373 buffer->stride(media::VideoFrame::kUPlane), |
367 buffer->data(media::VideoFrame::kUPlane), | 374 buffer->stride(media::VideoFrame::kVPlane), |
368 buffer->data(media::VideoFrame::kVPlane), | 375 buffer->data(media::VideoFrame::kYPlane), |
369 handles[media::VideoFrame::kYPlane], | 376 buffer->data(media::VideoFrame::kUPlane), |
370 handles[media::VideoFrame::kUPlane], | 377 buffer->data(media::VideoFrame::kVPlane), |
371 handles[media::VideoFrame::kVPlane], timestamp); | 378 handles[media::VideoFrame::kYPlane], |
| 379 handles[media::VideoFrame::kUPlane], |
| 380 handles[media::VideoFrame::kVPlane], timestamp) |
| 381 : media::VideoFrame::WrapExternalGpuMemoryBuffer( |
| 382 pixel_format, coded_size, gfx::Rect(coded_size), coded_size, |
| 383 buffer->data(0), handles[0], timestamp); |
372 buffer_finished_callback = media::BindToCurrentLoop( | 384 buffer_finished_callback = media::BindToCurrentLoop( |
373 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2, | 385 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2, |
374 weak_factory_.GetWeakPtr(), buffer_id, buffer)); | 386 weak_factory_.GetWeakPtr(), buffer_id, buffer)); |
375 break; | 387 break; |
376 } | 388 } |
377 case media::VideoFrame::STORAGE_SHMEM: { | 389 case media::VideoFrame::STORAGE_SHMEM: { |
378 const auto& iter = client_buffers_.find(buffer_id); | 390 const auto& iter = client_buffers_.find(buffer_id); |
379 DCHECK(iter != client_buffers_.end()); | 391 DCHECK(iter != client_buffers_.end()); |
380 const scoped_refptr<ClientBuffer> buffer = iter->second; | 392 const scoped_refptr<ClientBuffer> buffer = iter->second; |
381 frame = media::VideoFrame::WrapExternalSharedMemory( | 393 frame = media::VideoFrame::WrapExternalSharedMemory( |
(...skipping 188 matching lines...)
570 double consumer_resource_utilization = -1.0; | 582 double consumer_resource_utilization = -1.0; |
571 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION, | 583 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION, |
572 &consumer_resource_utilization)) { | 584 &consumer_resource_utilization)) { |
573 consumer_resource_utilization = -1.0; | 585 consumer_resource_utilization = -1.0; |
574 } | 586 } |
575 | 587 |
576 callback_to_io_thread.Run(*release_sync_token, consumer_resource_utilization); | 588 callback_to_io_thread.Run(*release_sync_token, consumer_resource_utilization); |
577 } | 589 } |
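As a usage note, the RESOURCE_UTILIZATION entry read above is something a frame consumer is expected to have written into the frame's metadata before releasing it; when the key is absent, -1.0 ("unknown") is forwarded. A minimal consumer-side sketch, assuming the standard media::VideoFrameMetadata API of this period (not part of this CL):

    // Hypothetical sink code: report that the frame consumed ~80% of the
    // sink's processing budget; VideoCaptureImpl relays this value back to
    // the host via VideoCaptureHostMsg_BufferReady.
    frame->metadata()->SetDouble(
        media::VideoFrameMetadata::RESOURCE_UTILIZATION, 0.8);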
578 | 590 |
579 } // namespace content | 591 } // namespace content |