Chromium Code Reviews

Side by Side Diff: media/capture/video/chromeos/camera_device_delegate.cc

Issue 2837273004: media: add video capture device for ARC++ camera HAL v3 (Closed)
Patch Set: address wuchengli@'s comments Created 3 years, 7 months ago
1 // Copyright 2017 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "media/capture/video/chromeos/camera_device_delegate.h"
6
7 #include <libdrm/drm_fourcc.h>
8
9 #include "media/capture/video/chromeos/camera_metadata_utils.h"
10 #include "mojo/edk/embedder/embedder.h"
11 #include "mojo/edk/embedder/scoped_platform_handle.h"
12 #include "third_party/libsync/include/sync/sync.h"
13
14 namespace media {
15
16 namespace {
17
18 struct SupportedFormat {
19 VideoPixelFormat chromium_format;
20 arc::mojom::HalPixelFormat hal_format;
21 uint32_t drm_format;
22 } const kSupportedFormats[] = {
wuchengli 2017/05/08 04:22:02 Document more. (1) why we need to use video record
jcliang 2017/05/13 08:53:14 Done.
23 {PIXEL_FORMAT_I420,
24 arc::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888,
25 DRM_FORMAT_YUV420},
26 // TODO(jcliang): Do not use IMPLEMENTATION_DEFINED formats at all as it is
27 // nearly impossible to get it right across all boards.
28 {PIXEL_FORMAT_RGB32,
29 arc::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
30 DRM_FORMAT_XBGR8888},
31 };
32
33 } // namespace
34
35 CameraDeviceDelegate::CameraDeviceDelegate(
36 VideoCaptureDeviceDescriptor device_descriptor,
37 arc::mojom::CameraMetadataPtr static_metadata,
38 mojo::InterfacePtrInfo<arc::mojom::Camera3DeviceOps> device_ops_info,
39 const scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
40 : device_descriptor_(device_descriptor),
41 static_metadata_(std::move(static_metadata)),
42 state_(kStopped),
43 closed_(nullptr),
44 rotation_(0),
45 device_ops_info_(std::move(device_ops_info)),
46 callback_ops_(this),
47 ipc_task_runner_(ipc_task_runner),
48 frame_number_(0),
49 partial_result_count_(1),
50 first_frame_shutter_time_(base::TimeTicks::Now()) {}
51
52 // static
53 VideoPixelFormat CameraDeviceDelegate::PixFormatHalToChromium(
54 arc::mojom::HalPixelFormat from) {
55 auto it =
56 std::find_if(std::begin(kSupportedFormats), std::end(kSupportedFormats),
57 [from](SupportedFormat f) { return f.hal_format == from; });
58 if (it == std::end(kSupportedFormats)) {
59 return PIXEL_FORMAT_UNKNOWN;
60 }
61 return it->chromium_format;
62 }
63
64 // static
65 uint32_t CameraDeviceDelegate::PixFormatChromiumToDrm(VideoPixelFormat from) {
66 auto it = std::find_if(
67 std::begin(kSupportedFormats), std::end(kSupportedFormats),
68 [from](SupportedFormat f) { return f.chromium_format == from; });
69 if (it == std::end(kSupportedFormats)) {
70 return 0;
71 }
72 return it->drm_format;
73 }
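
For orientation, the two static helpers above do a linear scan over kSupportedFormats in opposite directions. A minimal usage sketch (illustrative only, not part of this patch):

    // PIXEL_FORMAT_I420 maps to DRM_FORMAT_YUV420 via kSupportedFormats.
    uint32_t drm = CameraDeviceDelegate::PixFormatChromiumToDrm(PIXEL_FORMAT_I420);
    DCHECK_EQ(drm, static_cast<uint32_t>(DRM_FORMAT_YUV420));

    // HAL_PIXEL_FORMAT_YCbCr_420_888 maps back to PIXEL_FORMAT_I420.
    VideoPixelFormat chromium_format =
        CameraDeviceDelegate::PixFormatHalToChromium(
            arc::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888);
    DCHECK_EQ(chromium_format, PIXEL_FORMAT_I420);

An unsupported format falls through to PIXEL_FORMAT_UNKNOWN or 0, respectively, which the callers below treat as an error.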
74
75 void CameraDeviceDelegate::AllocateAndStart(
76 const VideoCaptureParams& params,
77 std::unique_ptr<VideoCaptureDevice::Client> client) {
78 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
79 DCHECK(!client_);
80 DCHECK_EQ(state_, kStopped);
81 const arc::mojom::CameraMetadataEntryPtr* partial_count = GetMetadataEntry(
82 static_metadata_,
83 arc::mojom::CameraMetadataTag::ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
84 // The partial result count metadata is optional. It defaults to 1 in case it
85 // is not set in the static metadata.
86 if (partial_count) {
87 partial_result_count_ =
88 *reinterpret_cast<int32_t*>((*partial_count)->data.data());
89 }
90
91 client_ = std::move(client);
92 device_ops_.Bind(std::move(device_ops_info_), ipc_task_runner_);
93 device_ops_.set_connection_error_handler(
94 base::Bind(&CameraDeviceDelegate::OnMojoConnectionError, this));
95 frame_number_ = 0;
96 streams_.clear();
97 partial_results_.clear();
wuchengli 2017/05/08 04:22:02 Do we need to reset rotation_? Reset first_frame_
jcliang 2017/05/13 08:53:14 We shouldn't reset |rotation_| because it should b
98
99 // Set up context for preview stream.
100 arc::mojom::Camera3StreamPtr preview_stream =
101 arc::mojom::Camera3Stream::New();
102 preview_stream->id = static_cast<uint64_t>(
103 arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
104 preview_stream->stream_type =
105 arc::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
106 preview_stream->width = params.requested_format.frame_size.width();
107 preview_stream->height = params.requested_format.frame_size.height();
108 // TODO(jcliang): We should not use implementation defined format here.
109 // preview_stream->format = HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
110 preview_stream->format =
111 arc::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
112 preview_stream->data_space = 0;
113 preview_stream->rotation =
114 arc::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
115 streams_[arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW] = {
116 .params = params, .stream = std::move(preview_stream),
117 };
118 // TODO(jcliang): Set up context for still capture stream.
119
120 SetState(kStarting);
121 Initialize();
122 }
123
124 void CameraDeviceDelegate::StopAndDeAllocate(base::WaitableEvent* closed) {
125 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
126 // StopAndDeAllocate may be called at any state.
127
128 if (!device_ops_.is_bound() || state_ == kStopping) {
wuchengli 2017/05/08 04:22:02 StopAndDeAllocate shouldn't be called twice in a r
jcliang 2017/05/13 08:53:14 Done.
129 // In case of Mojo connection error |device_ops_| and |callback_ops_| are
130 // unbound.
wuchengli 2017/05/08 04:22:02 Signal |closed|
jcliang 2017/05/13 08:53:14 Done.
131 return;
132 }
133 closed_ = closed;
134 SetState(kStopping);
135 device_ops_->Close(base::Bind(&CameraDeviceDelegate::OnClosed, this));
136 }
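
In the exchange above, wuchengli asked for StopAndDeAllocate to reject repeated calls and to signal |closed| on the early-return path; the reply is marked Done, but the follow-up lands in a later patch set not shown here. A sketch of one way that early return could look (an assumption, not the actual follow-up code):

    if (!device_ops_.is_bound() || state_ == kStopping) {
      // In case of a Mojo connection error |device_ops_| and |callback_ops_|
      // are already unbound, so there is nothing to close; signal the caller
      // directly so it does not block on |closed|.
      closed->Signal();
      return;
    }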
137
138 void CameraDeviceDelegate::TakePhoto(
139 VideoCaptureDevice::TakePhotoCallback callback) {
140 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
141 // TODO(jcliang): Implement TakePhoto.
142 }
143
144 void CameraDeviceDelegate::GetPhotoCapabilities(
145 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback) {
146 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
147 // TODO(jcliang): Implement GetPhotoCapabilities.
148 }
149
150 void CameraDeviceDelegate::SetPhotoOptions(
151 mojom::PhotoSettingsPtr settings,
152 VideoCaptureDevice::SetPhotoOptionsCallback callback) {
153 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
154 // TODO(jcliang): Implement SetPhotoOptions.
155 }
156
157 void CameraDeviceDelegate::SetRotation(int rotation) {
158 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
159 DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0);
160 rotation_ = rotation;
161 }
162
163 void CameraDeviceDelegate::SetState(State state) {
164 state_ = state;
165 }
166
167 void CameraDeviceDelegate::SetErrorState(
168 const tracked_objects::Location& from_here,
169 const std::string& reason) {
170 state_ = kError;
171 client_->OnError(from_here, reason);
172 }
173
174 void CameraDeviceDelegate::ResetMojoInterface() {
175 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
176 device_ops_.reset();
177 if (callback_ops_.is_bound()) {
178 callback_ops_.Unbind();
179 }
180 }
181
182 void CameraDeviceDelegate::OnMojoConnectionError() {
183 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
184 if (state_ == kStopping) {
185 // When in stopping state the camera HAL adapter may terminate the Mojo
186 // channel before we do.
wuchengli 2017/05/08 04:22:02 Document this means we don't get OnClosed from HAL
jcliang 2017/05/13 08:53:15 Done.
187 OnClosed(0);
188 } else {
189 // The Mojo channel terminated unexpectedly.
190 ResetMojoInterface();
191 SetState(kStopped);
192 SetErrorState(FROM_HERE, "Mojo connection error");
193 }
194 }
195
196 void CameraDeviceDelegate::OnClosed(int32_t result) {
197 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
198 DCHECK(closed_);
199 if (result) {
200 client_->OnLog(std::string("Failed to close device: ") +
201 std::string(strerror(result)));
202 }
203 ResetMojoInterface();
204 client_.reset();
205 SetState(kStopped);
206 closed_->Signal();
207 closed_ = nullptr;
208 }
209
210 void CameraDeviceDelegate::Initialize() {
211 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
212 DCHECK_EQ(state_, kStarting);
213
214 device_ops_->Initialize(
215 callback_ops_.CreateInterfacePtrAndBind(),
216 base::Bind(&CameraDeviceDelegate::OnInitialized, this));
217 callback_ops_.set_connection_error_handler(
218 base::Bind(&CameraDeviceDelegate::OnMojoConnectionError, this));
219 }
220
221 void CameraDeviceDelegate::OnInitialized(int32_t result) {
222 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
223 DCHECK(state_ == kStarting || state_ == kStopping);
224
225 if (state_ == kStopping) {
226 return;
227 }
228 if (result) {
229 SetErrorState(FROM_HERE, "Failed to initialize camera device");
230 return;
231 }
232 SetState(kInitialized);
233 ConfigureStreams();
234 }
235
236 void CameraDeviceDelegate::ConfigureStreams() {
237 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
238 DCHECK(state_ == kInitialized || state_ == kStopping);
239
240 arc::mojom::Camera3StreamConfigurationPtr stream_config =
241 arc::mojom::Camera3StreamConfiguration::New();
242 stream_config->num_streams = streams_.size();
243 for (const auto& context : streams_) {
244 stream_config->streams.push_back(context.second.stream.Clone());
245 }
246 stream_config->operation_mode = arc::mojom::Camera3StreamConfigurationMode::
247 CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
248 device_ops_->ConfigureStreams(
249 std::move(stream_config),
250 base::Bind(&CameraDeviceDelegate::OnConfiguredStreams, this));
251 }
252
253 void CameraDeviceDelegate::OnConfiguredStreams(
254 arc::mojom::Camera3StreamConfigurationPtr updated_config) {
255 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
256 DCHECK(state_ == kInitialized || state_ == kStopping);
257
258 if (state_ == kStopping) {
259 return;
260 }
261 for (size_t i = 0; i < updated_config->num_streams; ++i) {
262 auto& updated_stream = updated_config->streams[i];
263 arc::mojom::Camera3RequestTemplate stream_type =
264 static_cast<arc::mojom::Camera3RequestTemplate>(updated_stream->id);
265 StreamContext* stream_context = GetStreamContext(stream_type);
266 if (!stream_context) {
267 SetErrorState(FROM_HERE, "ConfigureStreams returned invalid stream");
268 continue;
269 }
270 // TODO(jcliang): Determine the best format from metadata.
271 VideoCaptureFormat capture_format = stream_context->params.requested_format;
272 capture_format.pixel_format = PIXEL_FORMAT_RGB32;
273 stream_context->capture_format = capture_format;
274 stream_context->stream->usage = updated_stream->usage;
275 stream_context->stream->max_buffers = updated_stream->max_buffers;
276
277 VLOG(2) << "Stream " << updated_stream->id
278 << " configured: usage=" << updated_stream->usage
279 << " max_buffers=" << updated_stream->max_buffers;
280
281 // Allocate buffers.
282 size_t num_buffers = stream_context->stream->max_buffers;
283 stream_context->buffers.resize(num_buffers);
284 for (size_t j = 0; j < num_buffers; ++j) {
285 const VideoCaptureFormat frame_format(
286 gfx::Size(stream_context->stream->width,
287 stream_context->stream->height),
288 0.0, stream_context->capture_format.pixel_format);
289 std::unique_ptr<base::SharedMemory> buffer(new base::SharedMemory());
290 base::SharedMemoryCreateOptions options;
291 options.size = frame_format.ImageAllocationSize();
292 options.share_read_only = false;
293 buffer->Create(options);
294 buffer->Map(buffer->requested_size());
295 stream_context->buffers[j] = std::move(buffer);
296 stream_context->free_buffers.push(j);
297 }
298 VLOG(2) << "Allocated " << stream_context->stream->max_buffers
299 << " buffers for stream " << stream_type;
300
301 // TODO(jcliang): Construct default request settings for still capture.
302 ConstructDefaultRequestSettings(
303 arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
304 }
305
306 client_->OnStarted();
307 }
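
The per-buffer shared-memory size in the allocation loop above comes from VideoCaptureFormat::ImageAllocationSize(). A rough worked example with illustrative numbers (assuming a 1280x720 preview):

    VideoCaptureFormat format(gfx::Size(1280, 720), 0.0, PIXEL_FORMAT_RGB32);
    // One RGB32 plane at 4 bytes per pixel:
    //   format.ImageAllocationSize() == 1280 * 720 * 4 == 3,686,400 bytes.
    // For PIXEL_FORMAT_I420 the same call yields 1280 * 720 * 3 / 2
    //   == 1,382,400 bytes (full-size Y plane plus quarter-size U and V).

Each stream allocates max_buffers such buffers, where max_buffers is reported back by the HAL in the updated stream configuration.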
308
309 void CameraDeviceDelegate::ConstructDefaultRequestSettings(
310 arc::mojom::Camera3RequestTemplate stream_type) {
311 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
312 DCHECK(GetStreamContext(stream_type));
313
314 device_ops_->ConstructDefaultRequestSettings(
315 stream_type,
316 base::Bind(&CameraDeviceDelegate::OnConstructedDefaultRequestSettings,
317 this, stream_type));
318 }
319
320 void CameraDeviceDelegate::OnConstructedDefaultRequestSettings(
321 arc::mojom::Camera3RequestTemplate stream_type,
322 arc::mojom::CameraMetadataPtr settings) {
323 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
324
325 if (state_ == kStopping) {
326 return;
327 }
328 StreamContext* stream_context = GetStreamContext(stream_type);
329 DCHECK(stream_context);
330 stream_context->request_settings = std::move(settings);
331 // TODO(jcliang): Once we have the still capture stream we need to change it
332 // to only SetState when both preview and still capture streams
333 // are configured.
334 SetState(kStreamConfigured);
335 if (stream_type ==
336 arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW) {
337 StartCapture(stream_type);
338 }
339 }
340
341 void CameraDeviceDelegate::StartCapture(
342 arc::mojom::Camera3RequestTemplate stream_type) {
343 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
344 // We may get here either after the streams are configured, or when we
345 // start still capture while the preview capture is running.
346 DCHECK(state_ == kStreamConfigured || state_ == kCapturing ||
347 state_ == kStopping);
348
349 if (state_ == kStopping) {
350 return;
351 }
352 StreamContext* stream_context = GetStreamContext(stream_type);
353 DCHECK(stream_context);
354 DCHECK(!stream_context->request_settings.is_null());
355 SetState(kCapturing);
356 RegisterBuffer(stream_type);
357 }
358
359 void CameraDeviceDelegate::RegisterBuffer(
360 arc::mojom::Camera3RequestTemplate stream_type) {
361 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
362 DCHECK(state_ == kCapturing || state_ == kStopping);
363
364 if (state_ == kStopping) {
365 return;
366 }
367 StreamContext* stream_context = GetStreamContext(stream_type);
368 DCHECK(stream_context);
369 if (stream_context->free_buffers.empty()) {
370 return;
371 }
372
373 const VideoCaptureParams& params = stream_context->params;
374 const arc::mojom::Camera3StreamPtr& stream = stream_context->stream;
375 size_t buffer_id = stream_context->free_buffers.front();
376 stream_context->free_buffers.pop();
377 const base::SharedMemory* buffer = stream_context->buffers[buffer_id].get();
378
379 VideoPixelFormat buffer_format = stream_context->capture_format.pixel_format;
380 uint32_t drm_format = PixFormatChromiumToDrm(buffer_format);
381 if (!drm_format) {
382 SetErrorState(FROM_HERE, "Unsupported video pixel format");
383 return;
384 }
385 arc::mojom::HalPixelFormat hal_pixel_format = stream->format;
386
387 size_t num_planes = VideoFrame::NumPlanes(buffer_format);
388 std::vector<mojo::ScopedHandle> fds(num_planes);
389 std::vector<uint32_t> strides(num_planes);
390 std::vector<uint32_t> offsets(num_planes);
391 for (size_t i = 0; i < num_planes; ++i) {
392 base::SharedMemoryHandle shm_handle = buffer->handle();
393 // Wrap the platform handle.
394 MojoHandle wrapped_handle;
395 MojoResult result = mojo::edk::CreatePlatformHandleWrapper(
396 mojo::edk::ScopedPlatformHandle(mojo::edk::PlatformHandle(
397 base::SharedMemory::DuplicateHandle(shm_handle).fd)),
398 &wrapped_handle);
399 if (result != MOJO_RESULT_OK) {
400 SetErrorState(FROM_HERE, "Failed to wrap shared memory handle");
401 return;
402 }
403 fds[i].reset(mojo::Handle(wrapped_handle));
404 strides[i] = VideoFrame::RowBytes(i, buffer_format, stream->width);
405 if (!i) {
406 offsets[i] = 0;
407 } else {
408 offsets[i] = offsets[i - 1] +
409 VideoFrame::PlaneSize(buffer_format, i,
410 params.requested_format.frame_size)
411 .GetArea();
412 }
413 }
414 device_ops_->RegisterBuffer(
415 buffer_id, arc::mojom::Camera3DeviceOps::BufferType::SHM, std::move(fds),
416 drm_format, hal_pixel_format, stream_context->stream->width,
417 stream_context->stream->height, std::move(strides), std::move(offsets),
418 base::Bind(&CameraDeviceDelegate::OnRegisteredBuffer, this, stream_type,
419 buffer_id));
420 VLOG(2) << "Registered buffer " << buffer_id << " of stream " << stream_type;
421 }
422
423 void CameraDeviceDelegate::OnRegisteredBuffer(
424 arc::mojom::Camera3RequestTemplate stream_type,
425 size_t buffer_index,
426 int32_t result) {
427 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
428 DCHECK(state_ == kCapturing || state_ == kStopping);
429
430 if (state_ == kStopping) {
431 return;
432 }
433 if (result) {
434 SetErrorState(FROM_HERE, "Failed to register buffer");
435 return;
436 }
437 ProcessCaptureRequest(stream_type, buffer_index);
438 }
439
440 void CameraDeviceDelegate::ProcessCaptureRequest(
441 arc::mojom::Camera3RequestTemplate stream_type,
442 size_t buffer_index) {
443 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
444 DCHECK(state_ == kCapturing || state_ == kStopping);
445
446 StreamContext* stream_context = GetStreamContext(stream_type);
447 DCHECK(stream_context);
448
449 arc::mojom::Camera3StreamBufferPtr buffer =
450 arc::mojom::Camera3StreamBuffer::New();
451 buffer->stream_id = static_cast<uint64_t>(
452 arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
453 buffer->buffer_id = buffer_index;
454 buffer->status = arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK;
455
456 // TODO(jcliang): Also process still capture buffers after we enabled still
457 // capture stream.
458 arc::mojom::Camera3CaptureRequestPtr request =
459 arc::mojom::Camera3CaptureRequest::New();
460 request->frame_number = frame_number_;
461 request->settings = stream_context->request_settings.Clone();
462 request->output_buffers.push_back(std::move(buffer));
463
464 device_ops_->ProcessCaptureRequest(
465 std::move(request),
466 base::Bind(&CameraDeviceDelegate::OnProcessedCaptureRequest, this,
467 stream_type));
468 VLOG(2) << "Requested capture for frame " << frame_number_ << " with buffer "
469 << buffer_index << " of stream " << stream_type;
470 frame_number_++;
471 }
472
473 void CameraDeviceDelegate::OnProcessedCaptureRequest(
474 arc::mojom::Camera3RequestTemplate stream_type,
475 int32_t result) {
476 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
477 DCHECK(state_ == kCapturing || state_ == kStopping);
478
479 if (state_ == kStopping) {
480 return;
481 }
482 if (result) {
483 SetErrorState(FROM_HERE, "Process capture request failed");
484 return;
485 }
486 RegisterBuffer(stream_type);
487 }
488
489 void CameraDeviceDelegate::ProcessCaptureResult(
490 arc::mojom::Camera3CaptureResultPtr result) {
491 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
492
493 uint32_t frame_number = result->frame_number;
494 CaptureResult& partial_result = partial_results_[frame_number];
wuchengli 2017/05/08 04:22:02 Document a new partial result may be created here
jcliang 2017/05/13 08:53:14 Documented. |partial_stage| is initialized in Capt
495 for (size_t i = 0; i < result->output_buffers->size(); ++i) {
496 arc::mojom::Camera3StreamBufferPtr& stream_buffer =
497 result->output_buffers.value()[i];
498 arc::mojom::Camera3RequestTemplate stream_type =
499 static_cast<arc::mojom::Camera3RequestTemplate>(
500 stream_buffer->stream_id);
501 // The camera HAL v3 API specifies that only one capture result can carry
502 // the result buffer for any given frame number.
503 if (partial_result.buffers.find(stream_type) !=
504 partial_result.buffers.end()) {
wuchengli 2017/05/08 04:22:02 Let's use LOG(ERROR) to make this is printed. Clie
jcliang 2017/05/13 08:53:14 Done.
505 client_->OnLog(
506 std::string("Received multiple result buffers for frame ") +
507 std::to_string(frame_number));
508 continue;
509 }
510 partial_results_[frame_number].buffers[stream_type] =
511 std::move(stream_buffer);
512 }
513
514 // |result->partial_result| is set to 0 if the capture result contains only
515 // the result buffer handles and no result metadata.
516 if (result->partial_result) {
517 partial_results_[frame_number].partial_stage = result->partial_result;
518 MergeMetadata(&partial_results_[frame_number].metadata, result->result);
519 }
520
521 if (partial_result.partial_stage == partial_result_count_) {
522 // This is the last capture result for the request of this frame number.
523 auto it = partial_results_.find(frame_number);
524 // We can only submit the result buffer after we receive the shutter time.
525 if (it->second.reference_time != base::TimeTicks()) {
526 SubmitCaptureResult(frame_number);
527 }
528 }
529 }
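
The per-frame bookkeeping used in ProcessCaptureResult(), Notify(), and SubmitCaptureResult() is declared in camera_device_delegate.h, which is not part of this file. From the call sites, the accumulation state looks roughly like the sketch below (field types inferred; the header is authoritative):

    struct CaptureResult {
      // Highest partial result stage received so far; the metadata is
      // complete once this reaches |partial_result_count_|.
      uint32_t partial_stage = 0;
      // Result metadata merged across partial results.
      arc::mojom::CameraMetadataPtr metadata;
      // At most one result buffer per stream for a given frame number.
      std::map<arc::mojom::Camera3RequestTemplate,
               arc::mojom::Camera3StreamBufferPtr> buffers;
      // Filled in by the shutter notification; a default-constructed value
      // means the shutter time has not arrived yet.
      base::TimeTicks reference_time;
      base::TimeDelta timestamp;
    };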
530
531 void CameraDeviceDelegate::Notify(arc::mojom::Camera3NotifyMsgPtr message) {
532 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
533
534 if (message->type == arc::mojom::Camera3MsgType::CAMERA3_MSG_ERROR) {
535 uint32_t frame_number = message->message->get_error()->frame_number;
536 uint64_t error_stream_id = message->message->get_error()->error_stream_id;
537 arc::mojom::Camera3ErrorMsgCode error_code =
538 message->message->get_error()->error_code;
539 switch (error_code) {
540 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_DEVICE:
541 // Fatal error and no more frames will be produced by the device.
542 SetErrorState(FROM_HERE, "Fatal device error");
543 break;
544 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_REQUEST:
545 // An error has occurred in processing the request; the request
546 // specified by |frame_number| has been dropped by the camera device.
547 // Subsequent requests are unaffected.
548 //
549 // The HAL will call ProcessCaptureResult with the buffers' state set to
550 // STATUS_ERROR. The content of the buffers will be dropped and the
551 // buffers will be reused in SubmitCaptureResult.
552 client_->OnLog(
553 std::string(
554 "An error occurred while processing request for frame ") +
555 std::to_string(frame_number));
556 break;
557 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_RESULT:
558 // An error has occurred in producing the output metadata buffer for a
559 // result; the output metadata will not be available for the frame
560 // specified by |frame_number|. Subsequent requests are unaffected.
561 client_->OnLog(std::string("An error occurred while producing result "
562 "metadata for frame ") +
563 std::to_string(frame_number));
564 // The result metadata will not be complete so we don't need to wait for
565 // partial results on frame |frame_number|.
566 partial_results_[frame_number].partial_stage = partial_result_count_;
567 break;
568 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_BUFFER:
569 // An error has occurred in placing the output buffer into a stream for
570 // a request. |frame_number| specifies the request for which the buffer
571 // was dropped, and |error_stream_id| specifies the stream that dropped
572 // the buffer.
573 //
574 // The HAL will call ProcessCaptureResult with the buffer's state set to
575 // STATUS_ERROR. The content of the buffer will be dropped and the
576 // buffer will be reused in SubmitCaptureResult.
577 client_->OnLog(
578 std::string(
579 "An error occurred while filling output buffer of stream ") +
580 std::to_string(error_stream_id) + std::string(" in frame ") +
581 std::to_string(frame_number));
582 break;
583 default:
584 // To eliminate the warning for not handling CAMERA3_MSG_NUM_ERRORS
585 break;
586 }
587 } else { // arc::mojom::Camera3MsgType::CAMERA3_MSG_SHUTTER
588 uint32_t frame_number = message->message->get_shutter()->frame_number;
589 uint64_t shutter_time = message->message->get_shutter()->timestamp;
590 CaptureResult& partial_result = partial_results_[frame_number];
wuchengli 2017/05/08 04:22:02 Document a new partial result may be created here
jcliang 2017/05/13 08:53:14 Done.
591 // Shutter timestamp is in ns.
592 base::TimeTicks reference_time =
593 base::TimeTicks::FromInternalValue(shutter_time / 1000);
594 partial_result.reference_time = reference_time;
595 if (!frame_number) {
596 // Record the shutter time of the first frame for calculating the
597 // timestamp.
598 first_frame_shutter_time_ = reference_time;
599 partial_result.timestamp = base::TimeDelta::FromMicroseconds(0);
600 } else {
601 partial_result.timestamp = reference_time - first_frame_shutter_time_;
602 }
603 if (partial_result.partial_stage == partial_result_count_) {
604 SubmitCaptureResult(frame_number);
605 }
606 }
607 }
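
The shutter timestamp in the branch above arrives in nanoseconds, while base::TimeTicks stores microseconds internally, hence the divide by 1000. A small worked example with made-up values:

    uint64_t shutter_time = 33333333;  // ns; e.g. the second frame at ~30 fps.
    base::TimeTicks reference_time =
        base::TimeTicks::FromInternalValue(shutter_time / 1000);  // 33,333 us.
    // If frame 0 set first_frame_shutter_time_ at t == 0, the delivered
    // timestamp for this frame is reference_time - first_frame_shutter_time_,
    // i.e. roughly 33 ms.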
608
609 void CameraDeviceDelegate::SubmitCaptureResult(uint32_t frame_number) {
610 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
611
612 if (partial_results_.begin()->first != frame_number) {
613 SetErrorState(FROM_HERE, "Received out-of-order frames from HAL");
614 return;
615 }
616
617 CaptureResult& partial_result = partial_results_[frame_number];
618 DCHECK_EQ(partial_result.partial_stage, partial_result_count_);
619 for (const auto& it : partial_result.buffers) {
620 arc::mojom::Camera3RequestTemplate stream_type = it.first;
621 StreamContext* stream_context = GetStreamContext(stream_type);
622 const arc::mojom::Camera3StreamBufferPtr& buffer = it.second;
623 uint32_t buffer_id = buffer->buffer_id;
624
625 // Wait on release fence before delivering the result buffer to client.
626 if (buffer->release_fence.is_valid()) {
627 const int kSyncWaitTimeoutMs = 1000;
628 mojo::edk::ScopedPlatformHandle fence;
629 MojoResult result = mojo::edk::PassWrappedPlatformHandle(
630 buffer->release_fence.release().value(), &fence);
631 if (result != MOJO_RESULT_OK) {
632 SetErrorState(FROM_HERE, "Failed to unwrap release fence fd");
633 return;
634 }
635     if (sync_wait(fence.get().handle, kSyncWaitTimeoutMs)) {
636 SetErrorState(FROM_HERE, "Sync wait on release fence timed out");
637 return;
638 }
639 }
640
641 if (stream_type ==
642 arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW) {
643 // Deliver the captured data to client and then re-queue the buffer.
644 if (buffer->status !=
645 arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
646 const base::SharedMemory* shm_buffer =
647 stream_context->buffers[buffer_id].get();
648 client_->OnIncomingCapturedData(
649 reinterpret_cast<uint8_t*>(shm_buffer->memory()),
650 shm_buffer->mapped_size(), stream_context->capture_format,
651 rotation_, partial_result.reference_time, partial_result.timestamp);
652 }
653 stream_context->free_buffers.push(buffer_id);
654 ipc_task_runner_->PostTask(
655 FROM_HERE,
656 base::Bind(
657 &CameraDeviceDelegate::RegisterBuffer, this,
658 arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW));
659 }
660 // TODO(jcliang): Handle still capture result for TakePhoto.
661 }
662 partial_results_.erase(frame_number);
663 }
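
A note on the release-fence wait in SubmitCaptureResult(): libsync's sync_wait() (third_party/libsync) returns 0 once the fence signals and a negative value on error or timeout, so a non-zero return means the buffer is not safe to read. Schematically:

    // int sync_wait(int fd, int timeout_ms);  // 0 on success, <0 on failure.
    if (sync_wait(fence.get().handle, kSyncWaitTimeoutMs)) {
      // Timed out or failed; report an error instead of delivering the buffer.
    }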
664
665 } // namespace media