Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(689)

Side by Side Diff: media/capture/video/chromeos/stream_buffer_manager.cc

Issue 2837273004: media: add video capture device for ARC++ camera HAL v3 (Closed)
Patch Set: restore patch set 24 Created 3 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 // Copyright 2017 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "media/capture/video/chromeos/stream_buffer_manager.h"
6
7 #include "media/capture/video/chromeos/camera_device_context.h"
8 #include "media/capture/video/chromeos/camera_metadata_utils.h"
9 #include "media/capture/video/chromeos/pixel_format_utils.h"
10 #include "mojo/edk/embedder/embedder.h"
11 #include "mojo/edk/embedder/scoped_platform_handle.h"
12 #include "third_party/libsync/include/sync/sync.h"
13
14 namespace media {
15
// Created on the IPC thread.  Binds the Camera3CallbackOps request so the
// camera HAL can deliver capture results and notifications back to this
// object; |device_context_| is borrowed and must outlive this object.
StreamBufferManager::StreamBufferManager(
    arc::mojom::Camera3CallbackOpsRequest callback_ops_request,
    std::unique_ptr<StreamCaptureInterface> capture_interface,
    CameraDeviceContext* device_context,
    scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
    : callback_ops_(this, std::move(callback_ops_request)),
      capture_interface_(std::move(capture_interface)),
      device_context_(device_context),
      ipc_task_runner_(std::move(ipc_task_runner)),
      capturing_(false),
      frame_number_(0),
      // Overwritten by SetUpStreamAndBuffers() with the HAL-reported count.
      partial_result_count_(1),
      // Placeholder; replaced by the shutter time of frame 0 in Notify().
      first_frame_shutter_time_(base::TimeTicks::Now()) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(callback_ops_.is_bound());
  DCHECK(device_context_);
}
33
34 void StreamBufferManager::SetUpStreamAndBuffers(
35 VideoCaptureFormat capture_format,
36 uint32_t partial_result_count,
37 arc::mojom::Camera3StreamPtr stream) {
38 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
39 DCHECK(!stream_context_);
40
41 VLOG(2) << "Stream " << stream->id << " configured: usage=" << stream->usage
42 << " max_buffers=" << stream->max_buffers;
43
44 const size_t kMaximumAllowedBuffers = 15;
45 if (stream->max_buffers > kMaximumAllowedBuffers) {
46 device_context_->SetErrorState(
47 FROM_HERE, std::string("Camera HAL requested ") +
48 std::to_string(stream->max_buffers) +
49 std::string(" buffers which exceeds the allowed maximum "
50 "number of buffers"));
51 return;
52 }
53
54 partial_result_count_ = partial_result_count;
55 stream_context_.reset(new StreamContext{capture_format, std::move(stream)});
56
57 // Allocate buffers.
58 size_t num_buffers = stream_context_->stream->max_buffers;
59 stream_context_->buffers.resize(num_buffers);
60 for (size_t j = 0; j < num_buffers; ++j) {
61 const VideoCaptureFormat frame_format(
62 gfx::Size(stream_context_->stream->width,
63 stream_context_->stream->height),
64 0.0, stream_context_->capture_format.pixel_format);
65 std::unique_ptr<base::SharedMemory> buffer(new base::SharedMemory());
66 base::SharedMemoryCreateOptions options;
67 options.size = frame_format.ImageAllocationSize();
68 options.share_read_only = false;
69 bool ret = buffer->Create(options);
70 if (!ret) {
71 device_context_->SetErrorState(FROM_HERE,
72 "Failed to create SharedMemory buffer");
73 return;
74 }
75 ret = buffer->Map(buffer->requested_size());
76 if (!ret) {
77 device_context_->SetErrorState(FROM_HERE,
78 "Failed to map SharedMemory buffer");
79 return;
80 }
81 stream_context_->buffers[j] = std::move(buffer);
82 stream_context_->free_buffers.push(j);
83 }
84 VLOG(2) << "Allocated " << stream_context_->stream->max_buffers << " buffers";
85 }
86
// Starts the capture loop.  |settings| is stored and re-used (cloned) for
// every subsequent capture request issued by ProcessCaptureRequest().
void StreamBufferManager::StartCapture(arc::mojom::CameraMetadataPtr settings) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(stream_context_);
  DCHECK(stream_context_->request_settings.is_null());

  capturing_ = true;
  stream_context_->request_settings = std::move(settings);
  // We cannot use a loop to register all the free buffers in one shot here
  // because the camera HAL v3 API specifies that the client cannot call
  // ProcessCaptureRequest before the previous one returns.
  RegisterBuffer();
}
99
// Stops the capture loop.  Only clears |capturing_|; every asynchronous
// callback (OnRegisteredBuffer, OnProcessedCaptureRequest,
// ProcessCaptureResult, Notify) checks the flag and drops its result, so
// no explicit cancellation of in-flight requests is needed here.
void StreamBufferManager::StopCapture() {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  capturing_ = false;
}
104
105 void StreamBufferManager::RegisterBuffer() {
106 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
107 DCHECK(stream_context_);
108
109 if (!capturing_) {
110 return;
111 }
112
113 if (stream_context_->free_buffers.empty()) {
114 return;
115 }
116
117 size_t buffer_id = stream_context_->free_buffers.front();
118 stream_context_->free_buffers.pop();
119 const base::SharedMemory* buffer = stream_context_->buffers[buffer_id].get();
120
121 VideoPixelFormat buffer_format = stream_context_->capture_format.pixel_format;
122 uint32_t drm_format = PixFormatChromiumToDrm(buffer_format);
123 if (!drm_format) {
124 device_context_->SetErrorState(
125 FROM_HERE, std::string("Unsupported video pixel format") +
126 VideoPixelFormatToString(buffer_format));
127 return;
128 }
129 arc::mojom::HalPixelFormat hal_pixel_format = stream_context_->stream->format;
130
131 size_t num_planes = VideoFrame::NumPlanes(buffer_format);
132 std::vector<mojo::ScopedHandle> fds(num_planes);
133 std::vector<uint32_t> strides(num_planes);
134 std::vector<uint32_t> offsets(num_planes);
135 for (size_t i = 0; i < num_planes; ++i) {
136 base::SharedMemoryHandle shm_handle = buffer->handle();
137 // Wrap the platform handle.
138 MojoHandle wrapped_handle;
139 MojoResult result = mojo::edk::CreatePlatformHandleWrapper(
140 mojo::edk::ScopedPlatformHandle(mojo::edk::PlatformHandle(
141 base::SharedMemory::DuplicateHandle(shm_handle).GetHandle())),
142 &wrapped_handle);
143 if (result != MOJO_RESULT_OK) {
144 device_context_->SetErrorState(FROM_HERE,
145 "Failed to wrap shared memory handle");
146 return;
147 }
148 fds[i].reset(mojo::Handle(wrapped_handle));
149 strides[i] = VideoFrame::RowBytes(
150 i, buffer_format, stream_context_->capture_format.frame_size.width());
151 if (!i) {
152 offsets[i] = 0;
153 } else {
154 offsets[i] =
155 offsets[i - 1] +
156 VideoFrame::PlaneSize(buffer_format, i - 1,
157 stream_context_->capture_format.frame_size)
158 .GetArea();
159 }
160 }
161 capture_interface_->RegisterBuffer(
162 buffer_id, arc::mojom::Camera3DeviceOps::BufferType::SHM, std::move(fds),
163 drm_format, hal_pixel_format, stream_context_->stream->width,
164 stream_context_->stream->height, std::move(strides), std::move(offsets),
165 base::Bind(&StreamBufferManager::OnRegisteredBuffer, this, buffer_id));
166 VLOG(2) << "Registered buffer " << buffer_id;
167 }
168
169 void StreamBufferManager::OnRegisteredBuffer(size_t buffer_id, int32_t result) {
170 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
171
172 if (!capturing_) {
173 return;
174 }
175 if (result) {
176 device_context_->SetErrorState(FROM_HERE,
177 std::string("Failed to register buffer: ") +
178 std::string(strerror(result)));
179 return;
180 }
181 ProcessCaptureRequest(buffer_id);
182 }
183
184 void StreamBufferManager::ProcessCaptureRequest(size_t buffer_id) {
185 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
186 DCHECK(stream_context_);
187
188 arc::mojom::Camera3StreamBufferPtr buffer =
189 arc::mojom::Camera3StreamBuffer::New();
190 buffer->stream_id = static_cast<uint64_t>(
191 arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
192 buffer->buffer_id = buffer_id;
193 buffer->status = arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK;
194
195 arc::mojom::Camera3CaptureRequestPtr request =
196 arc::mojom::Camera3CaptureRequest::New();
197 request->frame_number = frame_number_;
198 request->settings = stream_context_->request_settings.Clone();
199 request->output_buffers.push_back(std::move(buffer));
200
201 capture_interface_->ProcessCaptureRequest(
202 std::move(request),
203 base::Bind(&StreamBufferManager::OnProcessedCaptureRequest, this));
204 VLOG(2) << "Requested capture for frame " << frame_number_ << " with buffer "
205 << buffer_id;
206 frame_number_++;
207 // In case |frame_number_| wraps around, we start at 1 to avoid resetting
208 // |first_frame_shutter_time_|.
209 if (!frame_number_) {
210 frame_number_++;
211 }
212 }
213
214 void StreamBufferManager::OnProcessedCaptureRequest(int32_t result) {
215 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
216
217 if (!capturing_) {
218 return;
219 }
220 if (result) {
221 device_context_->SetErrorState(
222 FROM_HERE, std::string("Process capture request failed") +
223 std::string(strerror(result)));
224 return;
225 }
226 RegisterBuffer();
227 }
228
// Camera3CallbackOps::ProcessCaptureResult implementation.  A frame's
// result may arrive in several pieces — at most one output buffer plus up
// to |partial_result_count_| partial metadata packets — in any order and
// interleaved with the shutter message handled in Notify().  Pieces are
// accumulated in |partial_results_| keyed by frame number and submitted
// via SubmitCaptureResult() once the buffer, all metadata, and the shutter
// time have all been received.
void StreamBufferManager::ProcessCaptureResult(
    arc::mojom::Camera3CaptureResultPtr result) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  if (!capturing_) {
    return;
  }
  uint32_t frame_number = result->frame_number;
  // A new partial result may be created in either ProcessCaptureResult or
  // Notify.
  CaptureResult& partial_result = partial_results_[frame_number];
  // More in-flight results than allocated buffers means the HAL is
  // misbehaving (or we leaked entries); treat as fatal.
  if (partial_results_.size() > stream_context_->stream->max_buffers) {
    device_context_->SetErrorState(
        FROM_HERE,
        "Received more capture results than the maximum number of buffers");
    return;
  }
  if (result->output_buffers) {
    // Only one stream is configured, so each result carries at most one
    // output buffer.
    if (result->output_buffers->size() != 1) {
      device_context_->SetErrorState(
          FROM_HERE,
          std::string("Incorrect number of output buffers received: ") +
              std::to_string(result->output_buffers->size()));
      return;
    }
    arc::mojom::Camera3StreamBufferPtr& stream_buffer =
        result->output_buffers.value()[0];
    VLOG(2) << "Received capture result for frame " << frame_number
            << " stream_id: " << stream_buffer->stream_id;
    // The camera HAL v3 API specifies that only one capture result can carry
    // the result buffer for any given frame number.
    if (!partial_result.buffer.is_null()) {
      device_context_->SetErrorState(
          FROM_HERE,
          std::string("Received multiple result buffers for frame ") +
              std::to_string(frame_number));
      return;
    } else {
      partial_result.buffer = std::move(stream_buffer);
      // If the buffer is marked as error it is due to either a request or a
      // buffer error. In either case the content of the buffer must be dropped
      // and the buffer can be reused. We simply submit the buffer here and
      // don't wait for any partial results. SubmitCaptureResult() will drop
      // and reuse the buffer.
      if (partial_result.buffer->status ==
          arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
        SubmitCaptureResult(frame_number);
        return;
      }
    }
  }

  // |result->partial_result| is set to 0 if the capture result contains only
  // the result buffer handles and no result metadata.
  if (result->partial_result) {
    uint32_t result_id = result->partial_result;
    if (result_id > partial_result_count_) {
      device_context_->SetErrorState(
          FROM_HERE, std::string("Invalid partial_result id: ") +
                         std::to_string(result_id));
      return;
    }
    // Each partial id may be delivered only once per frame.
    if (partial_result.partial_metadata_received.find(result_id) !=
        partial_result.partial_metadata_received.end()) {
      device_context_->SetErrorState(
          FROM_HERE, std::string("Received duplicated partial metadata: ") +
                         std::to_string(result_id));
      return;
    }
    partial_result.partial_metadata_received.insert(result_id);
    MergeMetadata(&partial_result.metadata, result->result);
  }

  // Submit only when both the buffer and the full metadata set are in.
  if (partial_result.partial_metadata_received.size() ==
          partial_result_count_ &&
      !partial_result.buffer.is_null()) {
    // We can only submit the result buffer after we receive the shutter time.
    if (partial_result.reference_time != base::TimeTicks()) {
      SubmitCaptureResult(frame_number);
    }
  }
}
311
312 void StreamBufferManager::Notify(arc::mojom::Camera3NotifyMsgPtr message) {
313 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
314
315 if (!capturing_) {
316 return;
317 }
318 if (message->type == arc::mojom::Camera3MsgType::CAMERA3_MSG_ERROR) {
319 uint32_t frame_number = message->message->get_error()->frame_number;
320 uint64_t error_stream_id = message->message->get_error()->error_stream_id;
321 arc::mojom::Camera3ErrorMsgCode error_code =
322 message->message->get_error()->error_code;
323 HandleNotifyError(frame_number, error_stream_id, error_code);
324 } else { // arc::mojom::Camera3MsgType::CAMERA3_MSG_SHUTTER
325 uint32_t frame_number = message->message->get_shutter()->frame_number;
326 uint64_t shutter_time = message->message->get_shutter()->timestamp;
327 // A new partial result may be created in either ProcessCaptureResult or
328 // Notify.
329 VLOG(2) << "Received shutter time for frame " << frame_number;
330 if (!shutter_time) {
331 device_context_->SetErrorState(
332 FROM_HERE, std::string("Received invalid shutter time: ") +
333 std::to_string(shutter_time));
334 return;
335 }
336 CaptureResult& partial_result = partial_results_[frame_number];
chfremer 2017/06/08 21:58:38 Idea for a possible improvement: If we wanted to f
jcliang 2017/06/09 05:16:01 This sounds like a good design improvement. I'll s
337 if (partial_results_.size() > stream_context_->stream->max_buffers) {
338 device_context_->SetErrorState(
339 FROM_HERE,
340 "Received more capture results than the maximum number of buffers");
341 return;
342 }
343 // Shutter timestamp is in ns.
344 base::TimeTicks reference_time =
345 base::TimeTicks::FromInternalValue(shutter_time / 1000);
346 partial_result.reference_time = reference_time;
347 if (!frame_number) {
348 // Record the shutter time of the first frame for calculating the
349 // timestamp.
350 first_frame_shutter_time_ = reference_time;
351 }
352 partial_result.timestamp = reference_time - first_frame_shutter_time_;
353 if (partial_result.partial_metadata_received.size() ==
354 partial_result_count_ &&
355 !partial_result.buffer.is_null()) {
356 SubmitCaptureResult(frame_number);
357 }
358 }
359 }
360
361 void StreamBufferManager::HandleNotifyError(
362 uint32_t frame_number,
363 uint64_t error_stream_id,
364 arc::mojom::Camera3ErrorMsgCode error_code) {
365 switch (error_code) {
366 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_DEVICE:
367 // Fatal error and no more frames will be produced by the device.
368 device_context_->SetErrorState(FROM_HERE, "Fatal device error");
369 break;
370 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_REQUEST: {
371 // An error has occurred in processing the request; the request
372 // specified by |frame_number| has been dropped by the camera device.
373 // Subsequent requests are unaffected.
374 //
375 // The HAL will call ProcessCaptureResult with the buffers' state set to
376 // STATUS_ERROR. The content of the buffers will be dropped and the
377 // buffers will be reused in SubmitCaptureResult.
378 std::string warning_msg =
379 std::string("An error occurred while processing request for frame ") +
380 std::to_string(frame_number);
381 LOG(WARNING) << warning_msg;
382 device_context_->LogToClient(warning_msg);
383 break;
384 }
385 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_RESULT: {
386 // An error has occurred in producing the output metadata buffer for a
387 // result; the output metadata will not be available for the frame
388 // specified by |frame_number|. Subsequent requests are unaffected.
389 std::string warning_msg = std::string(
390 "An error occurred while producing result "
391 "metadata for frame ") +
392 std::to_string(frame_number);
393 LOG(WARNING) << warning_msg;
394 device_context_->LogToClient(warning_msg);
395 CaptureResult& partial_result = partial_results_[frame_number];
396 // The result metadata will not be complete so we don't need to wait for
397 // partial results on frame |frame_number|.
398 partial_result.partial_metadata_received.clear();
399 for (uint32_t i = 0; i < partial_result_count_; ++i) {
400 partial_result.partial_metadata_received.insert(i);
401 }
402 // If the buffer is already returned by the HAL, submit it and we're done.
403 if (!partial_result.buffer.is_null()) {
404 SubmitCaptureResult(frame_number);
405 }
406 break;
407 }
408 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_BUFFER:
409 // An error has occurred in placing the output buffer into a stream for
410 // a request. |frame_number| specifies the request for which the buffer
411 // was dropped, and |error_stream_id| specifies the stream that dropped
412 // the buffer.
413 //
414 // The HAL will call ProcessCaptureResult with the buffer's state set to
415 // STATUS_ERROR. The content of the buffer will be dropped and the
416 // buffer will be reused in SubmitCaptureResult.
417 device_context_->LogToClient(
418 std::string(
419 "An error occurred while filling output buffer of stream ") +
420 std::to_string(error_stream_id) + std::string(" in frame ") +
421 std::to_string(frame_number));
422 break;
423 default:
424 // To eliminate the warning for not handling CAMERA3_MSG_NUM_ERRORS
425 break;
426 }
427 }
428
429 void StreamBufferManager::SubmitCaptureResult(uint32_t frame_number) {
430 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
431
432 if (partial_results_.begin()->first != frame_number) {
433 device_context_->SetErrorState(
434 FROM_HERE, std::string("Received frame is out-of-order; expect ") +
435 std::to_string(partial_results_.begin()->first) +
436 std::string(" but got ") + std::to_string(frame_number));
437 return;
438 }
439
440 VLOG(2) << "Submit capture result of frame " << frame_number;
441 CaptureResult& partial_result = partial_results_[frame_number];
442 DCHECK(partial_result.buffer);
443 uint32_t buffer_id = partial_result.buffer->buffer_id;
444
445 // Wait on release fence before delivering the result buffer to client.
446 if (partial_result.buffer->release_fence.is_valid()) {
447 const int kSyncWaitTimeoutMs = 1000;
448 mojo::edk::ScopedPlatformHandle fence;
449 MojoResult result = mojo::edk::PassWrappedPlatformHandle(
450 partial_result.buffer->release_fence.release().value(), &fence);
451 if (result != MOJO_RESULT_OK) {
452 device_context_->SetErrorState(FROM_HERE,
453 "Failed to unwrap release fence fd");
454 return;
455 }
456 if (!sync_wait(fence.get().handle, kSyncWaitTimeoutMs)) {
chfremer 2017/06/08 21:58:38 So, if I understand this correctly, for each frame
jcliang 2017/06/09 05:16:01 Yes your understanding regarding the buffer circul
chfremer 2017/06/09 17:53:17 I have to say that I don't fully understand the co
jcliang 2017/06/12 08:52:39 The camera service on Chrome OS has two parts: 1.
457 device_context_->SetErrorState(FROM_HERE,
458 "Sync wait on release fence timed out");
459 return;
460 }
461 }
462
463 // Deliver the captured data to client and then re-queue the buffer.
464 if (partial_result.buffer->status !=
465 arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
466 DCHECK_EQ(partial_result.partial_metadata_received.size(),
467 partial_result_count_);
468 const base::SharedMemory* shm_buffer =
469 stream_context_->buffers[buffer_id].get();
470 device_context_->SubmitCapturedData(
471 reinterpret_cast<uint8_t*>(shm_buffer->memory()),
472 shm_buffer->mapped_size(), stream_context_->capture_format,
473 partial_result.reference_time, partial_result.timestamp);
474 }
475 stream_context_->free_buffers.push(buffer_id);
476 partial_results_.erase(frame_number);
477 RegisterBuffer();
478 }
479
480 } // namespace media
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698