Chromium Code Reviews

Side by Side Diff: media/capture/video/chromeos/stream_buffer_manager.cc

Issue 2936373002: Revert of media: add video capture device for ARC++ camera HAL v3 (Closed)
Patch Set: Created 3 years, 6 months ago
1 // Copyright 2017 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "media/capture/video/chromeos/stream_buffer_manager.h"
6
7 #include <sync/sync.h>
8
9 #include "base/memory/ptr_util.h"
10 #include "base/memory/shared_memory.h"
11 #include "media/capture/video/chromeos/camera_device_context.h"
12 #include "media/capture/video/chromeos/camera_metadata_utils.h"
13 #include "media/capture/video/chromeos/pixel_format_utils.h"
14 #include "mojo/edk/embedder/embedder.h"
15 #include "mojo/edk/embedder/scoped_platform_handle.h"
16
17 namespace media {
18
19 StreamBufferManager::StreamBufferManager(
20 arc::mojom::Camera3CallbackOpsRequest callback_ops_request,
21 std::unique_ptr<StreamCaptureInterface> capture_interface,
22 CameraDeviceContext* device_context,
23 scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
24 : callback_ops_(this, std::move(callback_ops_request)),
25 capture_interface_(std::move(capture_interface)),
26 device_context_(device_context),
27 ipc_task_runner_(std::move(ipc_task_runner)),
28 capturing_(false),
29 frame_number_(0),
30 partial_result_count_(1),
31 first_frame_shutter_time_(base::TimeTicks()),
32 weak_ptr_factory_(this) {
33 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
34 DCHECK(callback_ops_.is_bound());
35 DCHECK(device_context_);
36 }
37
38 StreamBufferManager::~StreamBufferManager() {}
39
40 void StreamBufferManager::SetUpStreamAndBuffers(
41 VideoCaptureFormat capture_format,
42 uint32_t partial_result_count,
43 arc::mojom::Camera3StreamPtr stream) {
44 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
45 DCHECK(!stream_context_);
46
47 VLOG(2) << "Stream " << stream->id << " configured: usage=" << stream->usage
48 << " max_buffers=" << stream->max_buffers;
49
50 const size_t kMaximumAllowedBuffers = 15;
51 if (stream->max_buffers > kMaximumAllowedBuffers) {
52 device_context_->SetErrorState(
53 FROM_HERE, std::string("Camera HAL requested ") +
54 std::to_string(stream->max_buffers) +
55 std::string(" buffers which exceeds the allowed maximum "
56 "number of buffers"));
57 return;
58 }
59
60 partial_result_count_ = partial_result_count;
61 stream_context_ = base::MakeUnique<StreamContext>();
62 stream_context_->capture_format = capture_format;
63 stream_context_->stream = std::move(stream);
64
65 // Allocate buffers.
66 size_t num_buffers = stream_context_->stream->max_buffers;
67 stream_context_->buffers.resize(num_buffers);
68 for (size_t j = 0; j < num_buffers; ++j) {
69 const VideoCaptureFormat frame_format(
70 gfx::Size(stream_context_->stream->width,
71 stream_context_->stream->height),
72 0.0, stream_context_->capture_format.pixel_format);
73 auto buffer = base::MakeUnique<base::SharedMemory>();
74 base::SharedMemoryCreateOptions options;
75 options.size = frame_format.ImageAllocationSize();
76 options.share_read_only = false;
77 bool ret = buffer->Create(options);
78 if (!ret) {
79 device_context_->SetErrorState(FROM_HERE,
80 "Failed to create SharedMemory buffer");
81 return;
82 }
83 ret = buffer->Map(buffer->requested_size());
84 if (!ret) {
85 device_context_->SetErrorState(FROM_HERE,
86 "Failed to map SharedMemory buffer");
87 return;
88 }
89 stream_context_->buffers[j] = std::move(buffer);
90 stream_context_->free_buffers.push(j);
91 }
92 VLOG(2) << "Allocated " << stream_context_->stream->max_buffers << " buffers";
93 }
94
95 void StreamBufferManager::StartCapture(arc::mojom::CameraMetadataPtr settings) {
96 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
97 DCHECK(stream_context_);
98 DCHECK(stream_context_->request_settings.is_null());
99
100 capturing_ = true;
101 stream_context_->request_settings = std::move(settings);
102 // We cannot use a loop to register all the free buffers in one shot here
103 // because the camera HAL v3 API specifies that the client cannot call
104 // ProcessCaptureRequest before the previous one returns.
105 RegisterBuffer();
106 }
107
108 void StreamBufferManager::StopCapture() {
109 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
110 capturing_ = false;
111 }
112
113 void StreamBufferManager::RegisterBuffer() {
114 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
115 DCHECK(stream_context_);
116
117 if (!capturing_) {
118 return;
119 }
120
121 if (stream_context_->free_buffers.empty()) {
122 return;
123 }
124
125 size_t buffer_id = stream_context_->free_buffers.front();
126 stream_context_->free_buffers.pop();
127 const base::SharedMemory* buffer = stream_context_->buffers[buffer_id].get();
128
129 VideoPixelFormat buffer_format = stream_context_->capture_format.pixel_format;
130 uint32_t drm_format = PixFormatChromiumToDrm(buffer_format);
131 if (!drm_format) {
132 device_context_->SetErrorState(
133 FROM_HERE, std::string("Unsupported video pixel format") +
134 VideoPixelFormatToString(buffer_format));
135 return;
136 }
137 arc::mojom::HalPixelFormat hal_pixel_format = stream_context_->stream->format;
138
139 size_t num_planes = VideoFrame::NumPlanes(buffer_format);
140 std::vector<StreamCaptureInterface::Plane> planes(num_planes);
141 for (size_t i = 0; i < num_planes; ++i) {
142 base::SharedMemoryHandle shm_handle = buffer->handle();
143 // Wrap the platform handle.
144 MojoHandle wrapped_handle;
145 MojoResult result = mojo::edk::CreatePlatformHandleWrapper(
146 mojo::edk::ScopedPlatformHandle(mojo::edk::PlatformHandle(
147 base::SharedMemory::DuplicateHandle(shm_handle).GetHandle())),
148 &wrapped_handle);
149 if (result != MOJO_RESULT_OK) {
150 device_context_->SetErrorState(FROM_HERE,
151 "Failed to wrap shared memory handle");
152 return;
153 }
154 planes[i].fd.reset(mojo::Handle(wrapped_handle));
155 planes[i].stride = VideoFrame::RowBytes(
156 i, buffer_format, stream_context_->capture_format.frame_size.width());
157 if (!i) {
158 planes[i].offset = 0;
159 } else {
160 planes[i].offset =
161 planes[i - 1].offset +
162 VideoFrame::PlaneSize(buffer_format, i - 1,
163 stream_context_->capture_format.frame_size)
164 .GetArea();
165 }
166 }
167 capture_interface_->RegisterBuffer(
168 buffer_id, arc::mojom::Camera3DeviceOps::BufferType::SHM, drm_format,
169 hal_pixel_format, stream_context_->stream->width,
170 stream_context_->stream->height, std::move(planes),
171 base::Bind(&StreamBufferManager::OnRegisteredBuffer,
172 weak_ptr_factory_.GetWeakPtr(), buffer_id));
173 VLOG(2) << "Registered buffer " << buffer_id;
174 }
175
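The stride/offset arithmetic above packs the planes back to back inside one shared-memory buffer. As a rough illustration, assuming an NV12 buffer at 640x480 (the actual values come from VideoFrame::RowBytes() and VideoFrame::PlaneSize(), and other pixel formats differ), the layout works out as in this standalone sketch:

// Standalone illustration of the plane layout computed in RegisterBuffer().
// Assumes NV12: one full-resolution Y plane followed by an interleaved,
// half-height UV plane. Not part of the CL.
#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t width = 640, height = 480;

  // Plane 0 (Y): one byte per pixel.
  const uint32_t y_stride = width;
  const uint32_t y_size = y_stride * height;

  // Plane 1 (UV, interleaved): same stride as Y, half the rows.
  const uint32_t uv_stride = width;
  const uint32_t uv_size = uv_stride * height / 2;

  // offset[i] = offset[i - 1] + size of plane i - 1, exactly as in the loop
  // above.
  const uint32_t y_offset = 0;
  const uint32_t uv_offset = y_offset + y_size;

  std::printf("Y:  stride=%u offset=%u size=%u\n", y_stride, y_offset, y_size);
  std::printf("UV: stride=%u offset=%u size=%u\n", uv_stride, uv_offset,
              uv_size);
  std::printf("total allocation=%u bytes\n", y_size + uv_size);
  // Prints: Y:  stride=640 offset=0 size=307200
  //         UV: stride=640 offset=307200 size=153600
  //         total allocation=460800 bytes
}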
176 void StreamBufferManager::OnRegisteredBuffer(size_t buffer_id, int32_t result) {
177 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
178
179 if (!capturing_) {
180 return;
181 }
182 if (result) {
183 device_context_->SetErrorState(FROM_HERE,
184 std::string("Failed to register buffer: ") +
185 std::string(strerror(result)));
186 return;
187 }
188 ProcessCaptureRequest(buffer_id);
189 }
190
191 void StreamBufferManager::ProcessCaptureRequest(size_t buffer_id) {
192 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
193 DCHECK(stream_context_);
194
195 arc::mojom::Camera3StreamBufferPtr buffer =
196 arc::mojom::Camera3StreamBuffer::New();
197 buffer->stream_id = static_cast<uint64_t>(
198 arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
199 buffer->buffer_id = buffer_id;
200 buffer->status = arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK;
201
202 arc::mojom::Camera3CaptureRequestPtr request =
203 arc::mojom::Camera3CaptureRequest::New();
204 request->frame_number = frame_number_;
205 request->settings = stream_context_->request_settings.Clone();
206 request->output_buffers.push_back(std::move(buffer));
207
208 capture_interface_->ProcessCaptureRequest(
209 std::move(request),
210 base::Bind(&StreamBufferManager::OnProcessedCaptureRequest,
211 weak_ptr_factory_.GetWeakPtr()));
212 VLOG(2) << "Requested capture for frame " << frame_number_ << " with buffer "
213 << buffer_id;
214 frame_number_++;
215 }
216
217 void StreamBufferManager::OnProcessedCaptureRequest(int32_t result) {
218 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
219
220 if (!capturing_) {
221 return;
222 }
223 if (result) {
224 device_context_->SetErrorState(
225 FROM_HERE, std::string("Process capture request failed") +
226 std::string(strerror(result)));
227 return;
228 }
229 RegisterBuffer();
230 }
231
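Because the HAL v3 contract forbids issuing a new ProcessCaptureRequest before the previous one returns, buffers circulate one at a time through RegisterBuffer -> OnRegisteredBuffer -> ProcessCaptureRequest -> OnProcessedCaptureRequest -> RegisterBuffer. A deliberately simplified, synchronous model of that cycle (toy code, not part of the CL; the real calls are asynchronous Mojo IPCs on the IPC thread):

// Toy model of the one-request-in-flight buffer cycle.
#include <cstdint>
#include <cstdio>
#include <queue>

int main() {
  std::queue<size_t> free_buffers;
  for (size_t i = 0; i < 3; ++i)
    free_buffers.push(i);

  uint32_t frame_number = 0;
  // Take one free buffer, "register" it, issue one capture request, and only
  // then loop around for the next buffer.
  for (int iteration = 0; iteration < 5 && !free_buffers.empty(); ++iteration) {
    size_t buffer_id = free_buffers.front();
    free_buffers.pop();
    std::printf("register buffer %zu, request frame %u\n", buffer_id,
                frame_number++);
    // The HAL eventually returns the buffer via ProcessCaptureResult(), at
    // which point it goes back on the free list and the cycle repeats.
    free_buffers.push(buffer_id);
  }
}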
232 void StreamBufferManager::ProcessCaptureResult(
233 arc::mojom::Camera3CaptureResultPtr result) {
234 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
235
236 if (!capturing_) {
237 return;
238 }
239 uint32_t frame_number = result->frame_number;
240 // A new partial result may be created in either ProcessCaptureResult or
241 // Notify.
242 CaptureResult& partial_result = partial_results_[frame_number];
243 if (partial_results_.size() > stream_context_->stream->max_buffers) {
244 device_context_->SetErrorState(
245 FROM_HERE,
246 "Received more capture results than the maximum number of buffers");
247 return;
248 }
249 if (result->output_buffers) {
250 if (result->output_buffers->size() != 1) {
251 device_context_->SetErrorState(
252 FROM_HERE,
253 std::string("Incorrect number of output buffers received: ") +
254 std::to_string(result->output_buffers->size()));
255 return;
256 }
257 arc::mojom::Camera3StreamBufferPtr& stream_buffer =
258 result->output_buffers.value()[0];
259 VLOG(2) << "Received capture result for frame " << frame_number
260 << " stream_id: " << stream_buffer->stream_id;
261 // The camera HAL v3 API specifies that only one capture result can carry
262 // the result buffer for any given frame number.
263 if (!partial_result.buffer.is_null()) {
264 device_context_->SetErrorState(
265 FROM_HERE,
266 std::string("Received multiple result buffers for frame ") +
267 std::to_string(frame_number));
268 return;
269 } else {
270 partial_result.buffer = std::move(stream_buffer);
271 // If the buffer is marked as error it is due to either a request or a
272 // buffer error. In either case the content of the buffer must be dropped
273 // and the buffer can be reused. We simply submit the buffer here and
274 // don't wait for any partial results. SubmitCaptureResult() will drop
275 // and reuse the buffer.
276 if (partial_result.buffer->status ==
277 arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
278 SubmitCaptureResult(frame_number);
279 return;
280 }
281 }
282 }
283
284 // |result->partial_result| is set to 0 if the capture result contains only
285 // the result buffer handles and no result metadata.
286 if (result->partial_result) {
287 uint32_t result_id = result->partial_result;
288 if (result_id > partial_result_count_) {
289 device_context_->SetErrorState(
290 FROM_HERE, std::string("Invalid partial_result id: ") +
291 std::to_string(result_id));
292 return;
293 }
294 if (partial_result.partial_metadata_received.find(result_id) !=
295 partial_result.partial_metadata_received.end()) {
296 device_context_->SetErrorState(
297 FROM_HERE, std::string("Received duplicated partial metadata: ") +
298 std::to_string(result_id));
299 return;
300 }
301 partial_result.partial_metadata_received.insert(result_id);
302 MergeMetadata(&partial_result.metadata, result->result);
303 }
304
305 SubmitCaptureResultIfComplete(frame_number);
306 }
307
308 void StreamBufferManager::Notify(arc::mojom::Camera3NotifyMsgPtr message) {
309 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
310
311 if (!capturing_) {
312 return;
313 }
314 if (message->type == arc::mojom::Camera3MsgType::CAMERA3_MSG_ERROR) {
315 uint32_t frame_number = message->message->get_error()->frame_number;
316 uint64_t error_stream_id = message->message->get_error()->error_stream_id;
317 arc::mojom::Camera3ErrorMsgCode error_code =
318 message->message->get_error()->error_code;
319 HandleNotifyError(frame_number, error_stream_id, error_code);
320 } else { // arc::mojom::Camera3MsgType::CAMERA3_MSG_SHUTTER
321 uint32_t frame_number = message->message->get_shutter()->frame_number;
322 uint64_t shutter_time = message->message->get_shutter()->timestamp;
323 // A new partial result may be created in either ProcessCaptureResult or
324 // Notify.
325 VLOG(2) << "Received shutter time for frame " << frame_number;
326 if (!shutter_time) {
327 device_context_->SetErrorState(
328 FROM_HERE, std::string("Received invalid shutter time: ") +
329 std::to_string(shutter_time));
330 return;
331 }
332 CaptureResult& partial_result = partial_results_[frame_number];
333 if (partial_results_.size() > stream_context_->stream->max_buffers) {
334 device_context_->SetErrorState(
335 FROM_HERE,
336 "Received more capture results than the maximum number of buffers");
337 return;
338 }
339 // Shutter timestamp is in ns; convert to microseconds, the internal unit of base::TimeTicks.
340 base::TimeTicks reference_time =
341 base::TimeTicks::FromInternalValue(shutter_time / 1000);
342 partial_result.reference_time = reference_time;
343 if (first_frame_shutter_time_.is_null()) {
344 // Record the shutter time of the first frame for calculating the
345 // timestamp.
346 first_frame_shutter_time_ = reference_time;
347 }
348 partial_result.timestamp = reference_time - first_frame_shutter_time_;
349 SubmitCaptureResultIfComplete(frame_number);
350 }
351 }
352
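The shutter timestamp arrives in nanoseconds while base::TimeTicks stores microseconds internally, hence the division by 1000; the timestamp handed to the client is then the offset from the first frame's shutter time. A minimal sketch of the same arithmetic with made-up values:

// Plain-integer version of the timestamp handling in Notify(); assumes the
// same ns -> us conversion and first-shutter origin as the code above.
#include <cstdint>
#include <cstdio>

int main() {
  const int64_t first_shutter_ns = 1'000'000'000;  // first frame, t = 1 s
  const int64_t shutter_ns = 1'033'333'333;        // ~33.3 ms later

  // ns -> us, matching TimeTicks::FromInternalValue(shutter_time / 1000).
  const int64_t first_shutter_us = first_shutter_ns / 1000;
  const int64_t shutter_us = shutter_ns / 1000;

  // The per-frame timestamp is relative to the first frame's shutter time.
  const int64_t timestamp_us = shutter_us - first_shutter_us;
  std::printf("timestamp = %lld us (~%.1f ms)\n",
              static_cast<long long>(timestamp_us), timestamp_us / 1000.0);
  // Prints: timestamp = 33333 us (~33.3 ms)
}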
353 void StreamBufferManager::HandleNotifyError(
354 uint32_t frame_number,
355 uint64_t error_stream_id,
356 arc::mojom::Camera3ErrorMsgCode error_code) {
357 std::string warning_msg;
358
359 switch (error_code) {
360 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_DEVICE:
361 // Fatal error and no more frames will be produced by the device.
362 device_context_->SetErrorState(FROM_HERE, "Fatal device error");
363 return;
364
365 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_REQUEST:
366 // An error has occurred in processing the request; the request
367 // specified by |frame_number| has been dropped by the camera device.
368 // Subsequent requests are unaffected.
369 //
370 // The HAL will call ProcessCaptureResult with the buffers' state set to
371 // STATUS_ERROR. The content of the buffers will be dropped and the
372 // buffers will be reused in SubmitCaptureResult.
373 warning_msg =
374 std::string("An error occurred while processing request for frame ") +
375 std::to_string(frame_number);
376 break;
377
378 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_RESULT:
379 // An error has occurred in producing the output metadata buffer for a
380 // result; the output metadata will not be available for the frame
381 // specified by |frame_number|. Subsequent requests are unaffected.
382 warning_msg = std::string(
383 "An error occurred while producing result "
384 "metadata for frame ") +
385 std::to_string(frame_number);
386 break;
387
388 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_BUFFER:
389 // An error has occurred in placing the output buffer into a stream for
390 // a request. |frame_number| specifies the request for which the buffer
391 // was dropped, and |error_stream_id| specifies the stream that dropped
392 // the buffer.
393 //
394 // The HAL will call ProcessCaptureResult with the buffer's state set to
395 // STATUS_ERROR. The content of the buffer will be dropped and the
396 // buffer will be reused in SubmitCaptureResult.
397 warning_msg =
398 std::string(
399 "An error occurred while filling output buffer of stream ") +
400 std::to_string(error_stream_id) + std::string(" in frame ") +
401 std::to_string(frame_number);
402 break;
403
404 default:
405 // To eliminate the warning for not handling CAMERA3_MSG_NUM_ERRORS
406 break;
407 }
408
409 LOG(WARNING) << warning_msg;
410 device_context_->LogToClient(warning_msg);
411 // If the buffer is already returned by the HAL, submit it and we're done.
412 auto partial_result = partial_results_.find(frame_number);
413 if (partial_result != partial_results_.end() &&
414 !partial_result->second.buffer.is_null()) {
415 SubmitCaptureResult(frame_number);
416 }
417 }
418
419 void StreamBufferManager::SubmitCaptureResultIfComplete(uint32_t frame_number) {
420 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
421 DCHECK(partial_results_.find(frame_number) != partial_results_.end());
422
423 CaptureResult& partial_result = partial_results_[frame_number];
424 if (partial_result.partial_metadata_received.size() < partial_result_count_ ||
425 partial_result.buffer.is_null() ||
426 partial_result.reference_time == base::TimeTicks()) {
427 // We can only submit the result buffer when:
428 // 1. All the result metadata are received, and
429 // 2. The result buffer is received, and
430 // 3. The shutter time is received.
431 return;
432 }
433 SubmitCaptureResult(frame_number);
434 }
435
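A frame is only submitted once all three pieces have arrived: every partial metadata packet, the result buffer, and the shutter time. A condensed sketch of that gate, using a hypothetical PendingResult struct in place of CaptureResult (the real struct also carries the merged metadata):

// Hypothetical, simplified stand-in for CaptureResult, used only to show the
// three completion conditions checked above.
#include <cstdint>
#include <cstdio>
#include <set>

struct PendingResult {
  std::set<uint32_t> partial_metadata_received;  // ids of metadata packets
  bool has_buffer = false;                       // result buffer returned
  int64_t reference_time_us = 0;                 // 0 == shutter not seen yet
};

bool IsComplete(const PendingResult& r, size_t partial_result_count) {
  return r.partial_metadata_received.size() >= partial_result_count &&
         r.has_buffer && r.reference_time_us != 0;
}

int main() {
  PendingResult r;
  r.partial_metadata_received = {1};
  r.has_buffer = true;
  std::printf("complete? %d\n", IsComplete(r, 1));  // 0: no shutter time yet
  r.reference_time_us = 1'000'000;
  std::printf("complete? %d\n", IsComplete(r, 1));  // 1: ready to submit
}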
436 void StreamBufferManager::SubmitCaptureResult(uint32_t frame_number) {
437 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
438 DCHECK(partial_results_.find(frame_number) != partial_results_.end());
439
440 CaptureResult& partial_result = partial_results_[frame_number];
441 if (partial_results_.begin()->first != frame_number) {
442 device_context_->SetErrorState(
443 FROM_HERE, std::string("Received frame is out-of-order; expect ") +
444 std::to_string(partial_results_.begin()->first) +
445 std::string(" but got ") + std::to_string(frame_number));
446 return;
447 }
448
449 VLOG(2) << "Submit capture result of frame " << frame_number;
450 uint32_t buffer_id = partial_result.buffer->buffer_id;
451
452 // Wait on release fence before delivering the result buffer to client.
453 if (partial_result.buffer->release_fence.is_valid()) {
454 const int kSyncWaitTimeoutMs = 1000;
455 mojo::edk::ScopedPlatformHandle fence;
456 MojoResult result = mojo::edk::PassWrappedPlatformHandle(
457 partial_result.buffer->release_fence.release().value(), &fence);
458 if (result != MOJO_RESULT_OK) {
459 device_context_->SetErrorState(FROM_HERE,
460 "Failed to unwrap release fence fd");
461 return;
462 }
463 if (sync_wait(fence.get().handle, kSyncWaitTimeoutMs) < 0) {
464 device_context_->SetErrorState(FROM_HERE,
465 "Sync wait on release fence timed out");
466 return;
467 }
468 }
469
470 // Deliver the captured data to client and then re-queue the buffer.
471 if (partial_result.buffer->status !=
472 arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
473 const base::SharedMemory* shm_buffer =
474 stream_context_->buffers[buffer_id].get();
475 device_context_->SubmitCapturedData(
476 reinterpret_cast<uint8_t*>(shm_buffer->memory()),
477 shm_buffer->mapped_size(), stream_context_->capture_format,
478 partial_result.reference_time, partial_result.timestamp);
479 }
480 stream_context_->free_buffers.push(buffer_id);
481 partial_results_.erase(frame_number);
482 RegisterBuffer();
483 }
484
485 StreamBufferManager::StreamContext::StreamContext() {}
486
487 StreamBufferManager::StreamContext::~StreamContext() {}
488
489 StreamBufferManager::CaptureResult::CaptureResult()
490 : metadata(arc::mojom::CameraMetadata::New()) {}
491
492 StreamBufferManager::CaptureResult::~CaptureResult() {}
493
494 } // namespace media