Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(103)

Side by Side Diff: media/capture/video/chromeos/camera_device_delegate.cc

Issue 2837273004: media: add video capture device for ARC++ camera HAL v3 (Closed)
Patch Set: address wuchengli@'s comments Created 3 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 // Copyright 2017 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
#include "media/capture/video/chromeos/camera_device_delegate.h"

#include <libdrm/drm_fourcc.h>

#include <cstring>

#include "media/capture/video/chromeos/camera_hal_delegate.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
#include "mojo/edk/embedder/embedder.h"
#include "mojo/edk/embedder/scoped_platform_handle.h"
#include "third_party/libsync/include/sync/sync.h"
14
15 namespace media {
16
namespace {

// Static mapping between the pixel format enums used by Chromium, the
// Android camera HAL, and DRM. Only formats listed here are supported by
// this capture device delegate.
struct SupportedFormat {
  VideoPixelFormat chromium_format;
  arc::mojom::HalPixelFormat hal_format;
  uint32_t drm_format;
} const kSupportedFormats[] = {
    // The Android camera HAL v3 has three types of mandatory pixel formats:
    //
    // 1. HAL_PIXEL_FORMAT_YCbCr_420_888 (YUV flexible format).
    // 2. HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED (platform-specific format).
    // 3. HAL_PIXEL_FORMAT_BLOB (for JPEG).
    //
    // We can't use HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED as it is highly
    // platform specific and there is no way for Chrome to query the exact
    // pixel layout of the implementation-defined buffer.
    //
    // On Android the framework requests the preview stream with the
    // implementation-defined format, and as a result some camera HALs support
    // only implementation-defined preview buffers. We should use the video
    // capture stream in Chrome VCD as it is mandatory for the camera HAL to
    // support YUV flexible format video streams.

    // TODO(jcliang): Change NV12 to I420 after the camera HAL supports
    // handling I420 buffers.
    {PIXEL_FORMAT_NV12,
     arc::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888,
     DRM_FORMAT_NV12},
};

}  // namespace
48
// Constructs the delegate in the stopped state. All subsequent IPC work is
// expected to run on |ipc_task_runner| (enforced via DCHECKs elsewhere).
CameraDeviceDelegate::CameraDeviceDelegate(
    VideoCaptureDeviceDescriptor device_descriptor,
    scoped_refptr<CameraHalDelegate> camera_hal_delegate,
    const scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
    : device_descriptor_(device_descriptor),
      camera_hal_delegate_(camera_hal_delegate),
      state_(kStopped),
      rotation_(0),
      callback_ops_(this),  // Binding for HAL -> Chrome callback IPC.
      ipc_task_runner_(ipc_task_runner),
      frame_number_(0),
      // Default per the HAL contract; overwritten from the static metadata
      // in OnGotCameraInfo() when ANDROID_REQUEST_PARTIAL_RESULT_COUNT is set.
      partial_result_count_(1),
      first_frame_shutter_time_(base::TimeTicks::Now()) {}
62
63 // static
64 VideoPixelFormat CameraDeviceDelegate::PixFormatHalToChromium(
65 arc::mojom::HalPixelFormat from) {
66 auto it =
67 std::find_if(std::begin(kSupportedFormats), std::end(kSupportedFormats),
68 [from](SupportedFormat f) { return f.hal_format == from; });
69 if (it == std::end(kSupportedFormats)) {
70 return PIXEL_FORMAT_UNKNOWN;
71 }
72 return it->chromium_format;
73 }
74
75 // static
76 uint32_t CameraDeviceDelegate::PixFormatChromiumToDrm(VideoPixelFormat from) {
77 auto it = std::find_if(
78 std::begin(kSupportedFormats), std::end(kSupportedFormats),
79 [from](SupportedFormat f) { return f.chromium_format == from; });
80 if (it == std::end(kSupportedFormats)) {
81 return 0;
82 }
83 return it->drm_format;
84 }
85
// Starts a capture session: resets per-session state and kicks off the
// asynchronous chain GetCameraInfo -> OpenDevice -> Initialize ->
// ConfigureStreams -> StartCapture.
void CameraDeviceDelegate::AllocateAndStart(
    const VideoCaptureParams& params,
    std::unique_ptr<VideoCaptureDevice::Client> client) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(!client_);
  DCHECK_EQ(state_, kStopped);

  // Reset the per-session capture state before starting.
  chrome_capture_params_ = params;
  client_ = std::move(client);
  frame_number_ = 0;
  partial_results_.clear();
  first_frame_shutter_time_ = base::TimeTicks::Now();
  SetState(kStarting);

  // NOTE(review): std::stoi throws on malformed input — presumably
  // |device_id| is always a numeric id produced by CameraHalDelegate;
  // confirm against the enumeration code.
  int32_t camera_id = std::stoi(device_descriptor_.device_id);
  // We need to get the static camera metadata of the camera device first.
  camera_hal_delegate_->GetCameraInfo(
      camera_id,
      base::Bind(&CameraDeviceDelegate::OnGotCameraInfoOnModuleDelegate, this));
}
106
// Stops the capture session. Teardown is asynchronous: the actual cleanup
// happens in OnClosed() once the HAL acknowledges Close(), or in the pending
// open/info callbacks when the device is still starting up.
void CameraDeviceDelegate::StopAndDeAllocate() {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  // StopAndDeAllocate may be called at any state except kStopping.
  DCHECK_NE(state_, kStopping);

  if (state_ == kStopped) {
    // In case of Mojo connection error the device may be stopped before
    // StopAndDeAllocate is called.
    return;
  }

  SetState(kStopping);
  if (!device_ops_.is_bound()) {
    // The device delegate is in the process of opening the camera device.
    // The in-flight callback observes kStopping and finishes the teardown
    // (see OnGotCameraInfo / OnOpenedDevice).
    return;
  }
  device_ops_->Close(base::Bind(&CameraDeviceDelegate::OnClosed, this));
}
125
// Not implemented yet; |callback| is currently dropped without being run.
void CameraDeviceDelegate::TakePhoto(
    VideoCaptureDevice::TakePhotoCallback callback) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  // TODO(jcliang): Implement TakePhoto.
}
131
// Not implemented yet; |callback| is currently dropped without being run.
void CameraDeviceDelegate::GetPhotoCapabilities(
    VideoCaptureDevice::GetPhotoCapabilitiesCallback callback) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  // TODO(jcliang): Implement GetPhotoCapabilities.
}
137
// Not implemented yet; |settings| is ignored and |callback| is dropped.
void CameraDeviceDelegate::SetPhotoOptions(
    mojom::PhotoSettingsPtr settings,
    VideoCaptureDevice::SetPhotoOptionsCallback callback) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  // TODO(jcliang): Implement SetPhotoOptions.
}
144
// Stores the display rotation applied when delivering captured frames (see
// OnIncomingCapturedData in SubmitCaptureResult).
void CameraDeviceDelegate::SetRotation(int rotation) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  // Rotation must be one of 0, 90, 180, or 270 degrees.
  DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0);
  rotation_ = rotation;
}
150
// Trivial state setter; error transitions go through SetErrorState() instead
// so the reason is logged and reported to the client.
void CameraDeviceDelegate::SetState(State state) {
  state_ = state;
}
154
155 void CameraDeviceDelegate::SetErrorState(
156 const tracked_objects::Location& from_here,
157 const std::string& reason) {
158 state_ = kError;
159 LOG(ERROR) << reason;
160 client_->OnError(from_here, reason);
161 }
162
// Tears down both directions of the device IPC: the outgoing device ops
// interface pointer and the incoming callback binding.
void CameraDeviceDelegate::ResetMojoInterface() {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  device_ops_.reset();
  if (callback_ops_.is_bound()) {
    callback_ops_.Unbind();
  }
}
170
// Connection error handler installed on both |device_ops_| and
// |callback_ops_|.
void CameraDeviceDelegate::OnMojoConnectionError() {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  if (state_ == kStopping) {
    // When in stopping state the camera HAL adapter may terminate the Mojo
    // channel before we do, in which case the OnClosed callback would not be
    // called.
    OnClosed(0);
  } else {
    // The Mojo channel terminated unexpectedly.
    ResetMojoInterface();
    SetState(kStopped);
    // NOTE(review): SetErrorState() overwrites the kStopped state set above
    // with kError — confirm that ending in kError rather than kStopped is
    // the intended final state here.
    SetErrorState(FROM_HERE, "Mojo connection error");
  }
}
185
// Final teardown step: releases the IPC interfaces, stream context and
// client, and transitions to kStopped. |result| is the HAL's errno-style
// status for the Close call (0 on success).
void CameraDeviceDelegate::OnClosed(int32_t result) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kStopping);
  if (result) {
    // Closing failed; log it to the client but continue teardown anyway.
    client_->OnLog(std::string("Failed to close device: ") +
                   std::string(strerror(result)));
  }
  ResetMojoInterface();
  // Only after the Mojo channel is closed can we be sure that |stream_context_|
  // is not accessed anymore.
  stream_context_.reset();
  client_.reset();
  SetState(kStopped);
}
200
201 void CameraDeviceDelegate::OnGotCameraInfoOnModuleDelegate(
202 int32_t result,
203 arc::mojom::CameraInfoPtr camera_info) {
204 // This method runs on |module_task_runner_| of |camera_hal_delegate_|.
205 ipc_task_runner_->PostTask(
206 FROM_HERE, base::Bind(&CameraDeviceDelegate::OnGotCameraInfo, this,
207 result, base::Passed(&camera_info)));
208 }
209
210 void CameraDeviceDelegate::OnGotCameraInfo(
211 int32_t result,
212 arc::mojom::CameraInfoPtr camera_info) {
213 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
214 DCHECK(state_ == kStarting || state_ == kStopping);
215
216 if (state_ == kStopping) {
217 OnClosed(0);
218 return;
219 }
220
221 if (result) {
222 std::string error_msg = "Failed to get camera info";
223 LOG(ERROR) << error_msg;
224 client_->OnError(FROM_HERE, error_msg);
225 return;
226 }
227 static_metadata_ = std::move(camera_info->static_camera_characteristics);
228 const arc::mojom::CameraMetadataEntryPtr* partial_count = GetMetadataEntry(
229 static_metadata_,
230 arc::mojom::CameraMetadataTag::ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
231 // The partial result count metadata is optional; defaults to 1 in case it
232 // is not set in the static metadata.
233 if (partial_count) {
234 partial_result_count_ =
235 *reinterpret_cast<int32_t*>((*partial_count)->data.data());
236 }
237 int32_t camera_id = std::stoi(device_descriptor_.device_id);
238 camera_hal_delegate_->OpenDevice(
239 camera_id,
240 base::Bind(&CameraDeviceDelegate::OnOpenedDeviceOnModuleDelegate, this));
241 }
242
243 void CameraDeviceDelegate::OnOpenedDeviceOnModuleDelegate(
244 int32_t result,
245 arc::mojom::Camera3DeviceOpsPtr device_ops) {
246 // This method runs on |module_task_runner_| of |camera_hal_delegate_|.
247 mojo::InterfacePtrInfo<arc::mojom::Camera3DeviceOps> device_ops_info =
248 device_ops.PassInterface();
249 ipc_task_runner_->PostTask(
250 FROM_HERE, base::Bind(&CameraDeviceDelegate::OnOpenedDevice, this, result,
251 base::Passed(&device_ops_info)));
252 }
253
// Binds the opened device's ops interface on the IPC thread and continues
// start-up with Initialize(). Aborts cleanly if teardown started meanwhile.
void CameraDeviceDelegate::OnOpenedDevice(
    int32_t result,
    mojo::InterfacePtrInfo<arc::mojom::Camera3DeviceOps> device_ops_info) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(state_ == kStarting || state_ == kStopping);

  if (state_ == kStopping) {
    // StopAndDeAllocate was called before the open completed. We never bind
    // |device_ops_info|, so it is dropped here, which closes the message
    // pipe and lets the HAL side observe the disconnect.
    OnClosed(0);
    return;
  }

  if (result) {
    // NOTE(review): unlike later failures this path does not go through
    // SetErrorState(), so |state_| stays kStarting — confirm intended.
    std::string error_msg = "Failed to open camera device";
    LOG(ERROR) << error_msg;
    client_->OnError(FROM_HERE, error_msg);
    return;
  }
  if (!device_ops_info.is_valid()) {
    std::string error_msg = "Invalid device_ops_info";
    LOG(ERROR) << error_msg;
    client_->OnError(FROM_HERE, error_msg);
    return;
  }
  device_ops_.Bind(std::move(device_ops_info));
  device_ops_.set_connection_error_handler(
      base::Bind(&CameraDeviceDelegate::OnMojoConnectionError, this));
  Initialize();
}
282
// Builds the preview stream description from the requested capture params
// and sends our callback interface to the HAL via Initialize().
void CameraDeviceDelegate::Initialize() {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kStarting);

  // Set up context for preview stream.
  arc::mojom::Camera3StreamPtr preview_stream =
      arc::mojom::Camera3Stream::New();
  // The preview template enum value doubles as the stream id (same value is
  // used as |stream_id| in ProcessCaptureRequest).
  preview_stream->id = static_cast<uint64_t>(
      arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
  preview_stream->stream_type =
      arc::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
  preview_stream->width =
      chrome_capture_params_.requested_format.frame_size.width();
  preview_stream->height =
      chrome_capture_params_.requested_format.frame_size.height();
  // YUV flexible format — the only mandatory HAL format Chrome can consume
  // (see kSupportedFormats above).
  preview_stream->format =
      arc::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
  preview_stream->data_space = 0;
  preview_stream->rotation =
      arc::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;

  stream_context_.reset(new StreamContext);
  stream_context_->stream = std::move(preview_stream);

  // Hand our callback interface to the HAL and wire up error handling for
  // the callback direction of the IPC as well.
  device_ops_->Initialize(
      callback_ops_.CreateInterfacePtrAndBind(),
      base::Bind(&CameraDeviceDelegate::OnInitialized, this));
  callback_ops_.set_connection_error_handler(
      base::Bind(&CameraDeviceDelegate::OnMojoConnectionError, this));
}
313
314 void CameraDeviceDelegate::OnInitialized(int32_t result) {
315 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
316 DCHECK(state_ == kStarting || state_ == kStopping);
317
318 if (state_ == kStopping) {
319 return;
320 }
321 if (result) {
322 SetErrorState(FROM_HERE, std::string("Failed to initialize camera device") +
323 std::to_string(result));
324 return;
325 }
326 SetState(kInitialized);
327 ConfigureStreams();
328 }
329
330 void CameraDeviceDelegate::ConfigureStreams() {
331 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
332 DCHECK(state_ == kInitialized);
333
334 arc::mojom::Camera3StreamConfigurationPtr stream_config =
335 arc::mojom::Camera3StreamConfiguration::New();
336 stream_config->streams.push_back(stream_context_->stream.Clone());
337 stream_config->operation_mode = arc::mojom::Camera3StreamConfigurationMode::
338 CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
339 device_ops_->ConfigureStreams(
340 std::move(stream_config),
341 base::Bind(&CameraDeviceDelegate::OnConfiguredStreams, this));
342 }
343
344 void CameraDeviceDelegate::OnConfiguredStreams(
345 int32_t result,
346 arc::mojom::Camera3StreamConfigurationPtr updated_config) {
347 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
348 DCHECK(state_ == kInitialized || state_ == kStopping);
wuchengli 2017/05/23 04:29:07 Discussed offline. Having DCHECK depends on the im
jcliang 2017/05/25 14:23:46 Done.
349
350 if (state_ == kStopping) {
351 return;
352 }
353 if (result) {
354 SetErrorState(FROM_HERE, std::string("Failed to configure streams") +
355 std::to_string(result));
356 return;
357 }
358 if (!updated_config || updated_config->streams.size() != 1) {
359 SetErrorState(FROM_HERE,
360 std::string("Wrong number of streams configured") +
361 std::to_string(updated_config->streams.size()));
362 return;
363 }
364 auto& updated_stream = updated_config->streams[0];
365 VideoCaptureFormat capture_format = chrome_capture_params_.requested_format;
366 // TODO(jcliang): Determine the best format from metadata.
367 capture_format.pixel_format = PIXEL_FORMAT_NV12;
368 stream_context_->capture_format = capture_format;
369 stream_context_->stream->usage = updated_stream->usage;
370 stream_context_->stream->max_buffers = updated_stream->max_buffers;
wuchengli 2017/05/25 07:59:06 Let's have a limit here in case HAL has a bug and
jcliang 2017/05/25 14:23:46 Done.
371
372 VLOG(2) << "Stream " << updated_stream->id
373 << " configured: usage=" << updated_stream->usage
374 << " max_buffers=" << updated_stream->max_buffers;
375
376 // Allocate buffers.
377 size_t num_buffers = stream_context_->stream->max_buffers;
378 stream_context_->buffers.resize(num_buffers);
379 for (size_t j = 0; j < num_buffers; ++j) {
380 const VideoCaptureFormat frame_format(
381 gfx::Size(stream_context_->stream->width,
382 stream_context_->stream->height),
383 0.0, stream_context_->capture_format.pixel_format);
384 std::unique_ptr<base::SharedMemory> buffer(new base::SharedMemory());
385 base::SharedMemoryCreateOptions options;
386 options.size = frame_format.ImageAllocationSize();
387 options.share_read_only = false;
388 bool ret = buffer->Create(options);
389 if (!ret) {
390 SetErrorState(FROM_HERE, "Failed to create SharedMemory buffer");
391 return;
392 }
393 ret = buffer->Map(buffer->requested_size());
394 if (!ret) {
395 SetErrorState(FROM_HERE, "Failed to map SharedMemory buffer");
396 return;
397 }
398 stream_context_->buffers[j] = std::move(buffer);
399 stream_context_->free_buffers.push(j);
400 }
401 VLOG(2) << "Allocated " << stream_context_->stream->max_buffers << " buffers";
402 ConstructDefaultRequestSettings();
403 client_->OnStarted();
404 }
405
406 void CameraDeviceDelegate::ConstructDefaultRequestSettings() {
407 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
408 DCHECK(stream_context_);
409
410 device_ops_->ConstructDefaultRequestSettings(
411 arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW,
412 base::Bind(&CameraDeviceDelegate::OnConstructedDefaultRequestSettings,
413 this));
414 }
415
// Stores the template request settings and starts capturing. The settings
// are cloned into every capture request (see ProcessCaptureRequest).
void CameraDeviceDelegate::OnConstructedDefaultRequestSettings(
    arc::mojom::CameraMetadataPtr settings) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  if (state_ == kStopping) {
    // Teardown started while waiting for the settings; abort start-up.
    return;
  }
  DCHECK(stream_context_);
  stream_context_->request_settings = std::move(settings);
  SetState(kStreamConfigured);
  StartCapture();
}
428
429 void CameraDeviceDelegate::StartCapture() {
430 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
431 DCHECK(state_ == kStreamConfigured);
432
433 if (state_ == kStopping) {
434 return;
435 }
436 DCHECK(stream_context_);
437 DCHECK(!stream_context_->request_settings.is_null());
438 SetState(kCapturing);
wuchengli 2017/05/25 07:59:06 Document we cannot use a loop to register all the
jcliang 2017/05/25 14:23:46 Done.
439 RegisterBuffer();
440 }
441
// Registers the next free shared-memory buffer with the HAL so it can be
// used as the output of a capture request. No-op when all buffers are in
// flight; the callbacks re-enter this method as buffers are returned.
void CameraDeviceDelegate::RegisterBuffer() {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(state_ == kCapturing || state_ == kStopping);

  if (state_ == kStopping) {
    return;
  }
  DCHECK(stream_context_);
  if (stream_context_->free_buffers.empty()) {
    // All buffers are in flight; SubmitCaptureResult() re-queues a buffer
    // and calls us again when one is returned.
    return;
  }

  // Take the next free buffer off the queue.
  size_t buffer_id = stream_context_->free_buffers.front();
  stream_context_->free_buffers.pop();
  const base::SharedMemory* buffer = stream_context_->buffers[buffer_id].get();

  VideoPixelFormat buffer_format = stream_context_->capture_format.pixel_format;
  uint32_t drm_format = PixFormatChromiumToDrm(buffer_format);
  if (!drm_format) {
    SetErrorState(FROM_HERE, std::string("Unsupported video pixel format") +
                                 VideoPixelFormatToString(buffer_format));
    return;
  }
  arc::mojom::HalPixelFormat hal_pixel_format = stream_context_->stream->format;

  // Describe each plane of the buffer (fd, stride, offset) for the HAL.
  // Every plane gets its own duplicated handle to the same shared-memory
  // region; |offsets| locate the planes within the single allocation.
  size_t num_planes = VideoFrame::NumPlanes(buffer_format);
  std::vector<mojo::ScopedHandle> fds(num_planes);
  std::vector<uint32_t> strides(num_planes);
  std::vector<uint32_t> offsets(num_planes);
  for (size_t i = 0; i < num_planes; ++i) {
    base::SharedMemoryHandle shm_handle = buffer->handle();
    // Wrap the platform handle.
    MojoHandle wrapped_handle;
    MojoResult result = mojo::edk::CreatePlatformHandleWrapper(
        mojo::edk::ScopedPlatformHandle(mojo::edk::PlatformHandle(
            base::SharedMemory::DuplicateHandle(shm_handle).GetHandle())),
        &wrapped_handle);
    if (result != MOJO_RESULT_OK) {
      SetErrorState(FROM_HERE, "Failed to wrap shared memory handle");
      return;
    }
    fds[i].reset(mojo::Handle(wrapped_handle));
    strides[i] = VideoFrame::RowBytes(
        i, buffer_format,
        chrome_capture_params_.requested_format.frame_size.width());
    if (!i) {
      offsets[i] = 0;
    } else {
      // Each plane starts immediately after the previous one.
      offsets[i] = offsets[i - 1] +
                   VideoFrame::PlaneSize(
                       buffer_format, i - 1,
                       chrome_capture_params_.requested_format.frame_size)
                       .GetArea();
    }
  }
  device_ops_->RegisterBuffer(
      buffer_id, arc::mojom::Camera3DeviceOps::BufferType::SHM, std::move(fds),
      drm_format, hal_pixel_format, stream_context_->stream->width,
      stream_context_->stream->height, std::move(strides), std::move(offsets),
      base::Bind(&CameraDeviceDelegate::OnRegisteredBuffer, this, buffer_id));
  VLOG(2) << "Registered buffer " << buffer_id;
}
504
// Handles the HAL's RegisterBuffer reply; on success issues a capture
// request using the newly registered buffer.
void CameraDeviceDelegate::OnRegisteredBuffer(size_t buffer_id,
                                              int32_t result) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(state_ == kCapturing || state_ == kStopping);

  if (state_ == kStopping) {
    return;
  }
  if (result) {
    SetErrorState(FROM_HERE, std::string("Failed to register buffer: ") +
                                 std::to_string(result));
    return;
  }
  ProcessCaptureRequest(buffer_id);
}
520
// Sends a capture request for the given registered buffer. Each request
// carries the preview template settings and exactly one output buffer.
void CameraDeviceDelegate::ProcessCaptureRequest(size_t buffer_id) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(state_ == kCapturing || state_ == kStopping);
  DCHECK(stream_context_);

  arc::mojom::Camera3StreamBufferPtr buffer =
      arc::mojom::Camera3StreamBuffer::New();
  // The preview template enum value doubles as the stream id (matches the
  // stream id set in Initialize()).
  buffer->stream_id = static_cast<uint64_t>(
      arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
  buffer->buffer_id = buffer_id;
  buffer->status = arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK;

  arc::mojom::Camera3CaptureRequestPtr request =
      arc::mojom::Camera3CaptureRequest::New();
  request->frame_number = frame_number_;
  request->settings = stream_context_->request_settings.Clone();
  request->output_buffers.push_back(std::move(buffer));

  device_ops_->ProcessCaptureRequest(
      std::move(request),
      base::Bind(&CameraDeviceDelegate::OnProcessedCaptureRequest, this));
  VLOG(2) << "Requested capture for frame " << frame_number_ << " with buffer "
          << buffer_id;
  frame_number_++;
  // In case |frame_number_| wraps around, we start at 1 to avoid resetting
  // |first_frame_shutter_time_|.
  if (!frame_number_) {
    frame_number_++;
  }
}
551
// Handles the HAL's ProcessCaptureRequest reply and chains the next buffer
// registration, keeping one request in flight per free buffer.
void CameraDeviceDelegate::OnProcessedCaptureRequest(int32_t result) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(state_ == kCapturing || state_ == kStopping);

  if (state_ == kStopping) {
    return;
  }
  if (result) {
    SetErrorState(FROM_HERE, std::string("Process capture request failed") +
                                 std::to_string(result));
    return;
  }
  RegisterBuffer();
}
566
567 void CameraDeviceDelegate::ProcessCaptureResult(
568 arc::mojom::Camera3CaptureResultPtr result) {
569 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
570
wuchengli 2017/05/23 04:29:07 return if state is kStopping or kStopped
jcliang 2017/05/25 14:23:46 Done.
571 uint32_t frame_number = result->frame_number;
572 // A new partial result may be created in either ProcessCaptureResult or
573 // Notify.
574 CaptureResult& partial_result = partial_results_[frame_number];
wuchengli 2017/05/25 07:59:05 Print an error or error out if the number of parti
jcliang 2017/05/25 14:23:46 Done.
575 if (result->output_buffers) {
576 if (result->output_buffers->size() != 1) {
577 SetErrorState(
578 FROM_HERE,
579 std::string("Incorrect number of output buffers received: ") +
580 std::to_string(result->output_buffers->size()));
581 return;
582 }
583 arc::mojom::Camera3StreamBufferPtr& stream_buffer =
584 result->output_buffers.value()[0];
585 VLOG(2) << "Received capture result for frame " << frame_number
586 << " stream_id: " << stream_buffer->stream_id;
587 // The camera HAL v3 API specifies that only one capture result can carry
588 // the result buffer for any given frame number.
589 if (!partial_result.buffer.is_null()) {
590 SetErrorState(FROM_HERE,
591 std::string("Received multiple result buffers for frame ") +
592 std::to_string(frame_number));
593 return;
594 } else {
595 partial_result.buffer = std::move(stream_buffer);
596 }
597 }
598
599 // |result->partial_result| is set to 0 if the capture result contains only
600 // the result buffer handles and no result metadata.
601 if (result->partial_result) {
602 uint32_t result_id = result->partial_result;
603 if (result_id > partial_result_count_) {
604 SetErrorState(FROM_HERE, std::string("Invalid partial_result id: ") +
605 std::to_string(result_id));
606 return;
607 }
608 if (partial_result.partial_metadata_received.find(result_id) !=
609 partial_result.partial_metadata_received.end()) {
610 SetErrorState(FROM_HERE,
611 std::string("Received duplicated partial metadata: ") +
612 std::to_string(result_id));
613 return;
614 }
615 partial_result.partial_metadata_received.insert(result_id);
616 MergeMetadata(&partial_result.metadata, result->result);
617 }
618
619 if (partial_result.partial_metadata_received.size() ==
620 partial_result_count_ &&
621 !partial_result.buffer.is_null()) {
622 // We can only submit the result buffer after we receive the shutter time.
623 if (partial_result.reference_time != base::TimeTicks()) {
624 SubmitCaptureResult(frame_number);
625 }
626 }
627 }
628
629 void CameraDeviceDelegate::Notify(arc::mojom::Camera3NotifyMsgPtr message) {
630 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
631
wuchengli 2017/05/23 04:29:07 return if state is kStopping or kStopped
jcliang 2017/05/25 14:23:46 Done.
632 if (message->type == arc::mojom::Camera3MsgType::CAMERA3_MSG_ERROR) {
wuchengli 2017/05/25 07:59:06 This function is long. Line 633-695 can be moved t
jcliang 2017/05/25 14:23:46 Done.
633 uint32_t frame_number = message->message->get_error()->frame_number;
634 uint64_t error_stream_id = message->message->get_error()->error_stream_id;
635 arc::mojom::Camera3ErrorMsgCode error_code =
636 message->message->get_error()->error_code;
637 switch (error_code) {
638 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_DEVICE:
639 // Fatal error and no more frames will be produced by the device.
640 SetErrorState(FROM_HERE, "Fatal device error");
641 break;
642 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_REQUEST: {
643 // An error has occurred in processing the request; the request
644 // specified by |frame_number| has been dropped by the camera device.
645 // Subsequent requests are unaffected.
646 //
647 // The HAL will call ProcessCaptureResult with the buffers' state set to
648 // STATUS_ERROR. The content of the buffers will be dropped and the
649 // buffers will be reused in SubmitCaptureResult.
650 std::string warning_msg =
651 std::string(
652 "An error occurred while processing request for frame ") +
653 std::to_string(frame_number);
654 LOG(WARNING) << warning_msg;
655 client_->OnLog(warning_msg);
656 break;
657 }
658 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_RESULT: {
659 // An error has occurred in producing the output metadata buffer for a
660 // result; the output metadata will not be available for the frame
661 // specified by |frame_number|. Subsequent requests are unaffected.
662 std::string warning_msg =
663 std::string(
664 "An error occurred while producing result "
665 "metadata for frame ") +
666 std::to_string(frame_number);
667 LOG(WARNING) << warning_msg;
668 client_->OnLog(warning_msg);
669 // The result metadata will not be complete so we don't need to wait for
670 // partial results on frame |frame_number|.
671 partial_results_[frame_number].partial_metadata_received.clear();
672 for (uint32_t i = 0; i < partial_result_count_; ++i) {
673 partial_results_[frame_number].partial_metadata_received.insert(i);
674 }
675 break;
676 }
677 case arc::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_BUFFER:
678 // An error has occurred in placing the output buffer into a stream for
679 // a request. |frame_number| specifies the request for which the buffer
680 // was dropped, and |error_stream_id| specifies the stream that dropped
681 // the buffer.
682 //
683 // The HAL will call ProcessCaptureResult with the buffer's state set to
684 // STATUS_ERROR. The content of the buffer will be dropped and the
685 // buffer will be reused in SubmitCaptureResult.
686 client_->OnLog(
687 std::string(
688 "An error occurred while filling output buffer of stream ") +
689 std::to_string(error_stream_id) + std::string(" in frame ") +
690 std::to_string(frame_number));
691 break;
692 default:
693 // To eliminate the warning for not handling CAMERA3_MSG_NUM_ERRORS
694 break;
695 }
696 } else { // arc::mojom::Camera3MsgType::CAMERA3_MSG_SHUTTER
697 uint32_t frame_number = message->message->get_shutter()->frame_number;
698 uint64_t shutter_time = message->message->get_shutter()->timestamp;
699 // A new partial result may be created in either ProcessCaptureResult or
700 // Notify.
701 VLOG(2) << "Received shutter time for frame " << frame_number;
702 if (!shutter_time) {
703 SetErrorState(FROM_HERE, std::string("Received invalid shutter time: ") +
704 std::to_string(shutter_time));
705 return;
706 }
707 CaptureResult& partial_result = partial_results_[frame_number];
wuchengli 2017/05/25 07:59:06 Print an error or error out if the number of parti
jcliang 2017/05/25 14:23:46 Done.
708 // Shutter timestamp is in ns.
709 base::TimeTicks reference_time =
710 base::TimeTicks::FromInternalValue(shutter_time / 1000);
711 partial_result.reference_time = reference_time;
712 if (!frame_number) {
713 // Record the shutter time of the first frame for calculating the
714 // timestamp.
715 first_frame_shutter_time_ = reference_time;
716 }
717 partial_result.timestamp = reference_time - first_frame_shutter_time_;
718 if (partial_result.partial_metadata_received.size() ==
719 partial_result_count_ &&
720 !partial_result.buffer.is_null()) {
721 SubmitCaptureResult(frame_number);
722 }
723 }
724 }
725
726 void CameraDeviceDelegate::SubmitCaptureResult(uint32_t frame_number) {
727 DCHECK(ipc_task_runner_->BelongsToCurrentThread());
728
729 if (partial_results_.begin()->first != frame_number) {
730 SetErrorState(FROM_HERE, "Received out-of-order frames from HAL");
wuchengli 2017/05/25 07:59:06 print partial_results_.begin()->first and frame_nu
jcliang 2017/05/25 14:23:46 Done.
731 return;
732 }
733
734 VLOG(2) << "Submit capture result of frame " << frame_number;
735 CaptureResult& partial_result = partial_results_[frame_number];
736 DCHECK_EQ(partial_result.partial_metadata_received.size(),
737 partial_result_count_);
738 DCHECK(partial_result.buffer);
739 uint32_t buffer_id = partial_result.buffer->buffer_id;
740
741 // Wait on release fence before delivering the result buffer to client.
742 if (partial_result.buffer->release_fence.is_valid()) {
743 const int kSyncWaitTimeoutMs = 1000;
744 mojo::edk::ScopedPlatformHandle fence;
745 MojoResult result = mojo::edk::PassWrappedPlatformHandle(
746 partial_result.buffer->release_fence.release().value(), &fence);
747 if (result != MOJO_RESULT_OK) {
748 SetErrorState(FROM_HERE, "Failed to unwrap release fence fd");
749 return;
750 }
751 if (!sync_wait(fence.get().handle, kSyncWaitTimeoutMs)) {
752 SetErrorState(FROM_HERE, "Sync wait on release fence timed out");
753 return;
754 }
755 }
756
757 // Deliver the captured data to client and then re-queue the buffer.
758 if (partial_result.buffer->status !=
759 arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
760 const base::SharedMemory* shm_buffer =
761 stream_context_->buffers[buffer_id].get();
762 client_->OnIncomingCapturedData(
763 reinterpret_cast<uint8_t*>(shm_buffer->memory()),
764 shm_buffer->mapped_size(), stream_context_->capture_format, rotation_,
765 partial_result.reference_time, partial_result.timestamp);
766 }
767 stream_context_->free_buffers.push(buffer_id);
768 partial_results_.erase(frame_number);
769 RegisterBuffer();
770 }
771
772 } // namespace media
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698