Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(489)

Unified Diff: media/capture/video/chromeos/camera_device_delegate.cc

Issue 2837273004: media: add video capture device for ARC++ camera HAL v3 (Closed)
Patch Set: revise some code comments Created 3 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: media/capture/video/chromeos/camera_device_delegate.cc
diff --git a/media/capture/video/chromeos/camera_device_delegate.cc b/media/capture/video/chromeos/camera_device_delegate.cc
new file mode 100644
index 0000000000000000000000000000000000000000..9fc79001e2a98010db274ba892286f7d55245c48
--- /dev/null
+++ b/media/capture/video/chromeos/camera_device_delegate.cc
@@ -0,0 +1,605 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/camera_device_delegate.h"
+
+#include <libdrm/drm_fourcc.h>
+
+#include "media/capture/video/chromeos/camera_metadata_utils.h"
+#include "mojo/edk/embedder/embedder.h"
+#include "mojo/edk/embedder/scoped_platform_handle.h"
+#include "third_party/libsync/include/sync/sync.h"
+
+namespace media {
+
+namespace {
+
+// Translation table between the three pixel-format vocabularies this
+// delegate bridges: Chromium capture formats (VideoPixelFormat), the Android
+// camera HAL formats (arc::mojom::HalPixelFormat), and the DRM fourcc codes
+// passed to the HAL when registering buffers.
+struct SupportedFormat {
+ VideoPixelFormat chromium_format;
+ arc::mojom::HalPixelFormat hal_format;
+ uint32_t drm_format;
+} const kSupportedFormats[] = {
+ {PIXEL_FORMAT_I420,
+ arc::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888,
+ DRM_FORMAT_YUV420},
+ // TODO(jcliang): Do not use IMPLEMENTATION_DEFINED formats at all as it is
+ // nearly impossible to get it right across all boards.
+ {PIXEL_FORMAT_RGB32,
+ arc::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ DRM_FORMAT_XBGR8888},
+};
+
+} // namespace
+
+// |device_ops_info| is the still-unbound Mojo channel to the HAL v3 device;
+// it is bound on |ipc_task_runner| in AllocateAndStart(). |static_metadata|
+// holds the camera's static characteristics and is consulted for
+// ANDROID_REQUEST_PARTIAL_RESULT_COUNT when capture starts.
+// |partial_result_count_| defaults to 1 per the HAL v3 contract and
+// |first_frame_shutter_time_| is re-anchored on the first shutter message.
+CameraDeviceDelegate::CameraDeviceDelegate(
+ VideoCaptureDeviceDescriptor device_descriptor,
+ arc::mojom::CameraMetadataPtr static_metadata,
+ mojo::InterfacePtrInfo<arc::mojom::Camera3DeviceOps> device_ops_info,
+ const scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
+ : device_descriptor_(device_descriptor),
+ static_metadata_(std::move(static_metadata)),
+ state_(kStopped),
+ rotation_(0),
+ device_ops_info_(std::move(device_ops_info)),
+ callback_ops_(this),
+ ipc_task_runner_(ipc_task_runner),
+ frame_number_(0),
+ partial_result_count_(1),
+ first_frame_shutter_time_(base::TimeTicks::Now()) {}
+
+// static
+VideoPixelFormat CameraDeviceDelegate::PixFormatHalToChromium(
+    arc::mojom::HalPixelFormat from) {
+  // Linear scan of the static translation table; returns
+  // PIXEL_FORMAT_UNKNOWN when the HAL format has no Chromium counterpart.
+  for (const SupportedFormat& entry : kSupportedFormats) {
+    if (entry.hal_format == from) {
+      return entry.chromium_format;
+    }
+  }
+  return PIXEL_FORMAT_UNKNOWN;
+}
+
+// static
+uint32_t CameraDeviceDelegate::PixFormatChromiumToDrm(VideoPixelFormat from) {
+  // Linear scan of the static translation table; returns 0 (an invalid DRM
+  // fourcc) when the Chromium format has no DRM counterpart.
+  for (const SupportedFormat& entry : kSupportedFormats) {
+    if (entry.chromium_format == from) {
+      return entry.drm_format;
+    }
+  }
+  return 0;
+}
+
+// Starts the capture pipeline: reads the partial-result count from the
+// static metadata, binds the device Mojo channel, builds the preview stream
+// context from |params|, and kicks off the Initialize -> ConfigureStreams ->
+// StartCapture sequence.
+void CameraDeviceDelegate::AllocateAndStart(
+    const VideoCaptureParams& params,
+    std::unique_ptr<VideoCaptureDevice::Client> client) {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+  DCHECK(!client_);
+  // Fixed: this was |DCHECK(state_ = kStopped)| — an assignment that always
+  // evaluated true and silently overwrote |state_| in DCHECK-enabled builds.
+  DCHECK(state_ == kStopped);
+  const arc::mojom::CameraMetadataEntryPtr* partial_count = GetMetadataEntry(
+      static_metadata_,
+      arc::mojom::CameraMetadataTag::ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
+  // The partial result count metadata is optional. It defaults to 1 in case
+  // it is not set in the static metadata.
+  if (partial_count) {
+    partial_result_count_ =
+        *reinterpret_cast<int32_t*>((*partial_count)->data.data());
+  }
+
+  client_ = std::move(client);
+  device_ops_.Bind(std::move(device_ops_info_), ipc_task_runner_);
+  device_ops_.set_connection_error_handler(
+      base::Bind(&CameraDeviceDelegate::OnMojoConnectionError, this));
+  frame_number_ = 0;
+  streams_.clear();
+  partial_results_.clear();
+
+  // Set up context for preview stream. The stream id doubles as the HAL
+  // request template it was created for.
+  arc::mojom::Camera3StreamPtr preview_stream =
+      arc::mojom::Camera3Stream::New();
+  preview_stream->id = static_cast<uint64_t>(
+      arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
+  preview_stream->stream_type =
+      arc::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
+  preview_stream->width = params.requested_format.frame_size.width();
+  preview_stream->height = params.requested_format.frame_size.height();
+  // TODO(jcliang): We should not use implementation defined format here.
+  preview_stream->format =
+      arc::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+  preview_stream->data_space = 0;
+  preview_stream->rotation =
+      arc::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
+  streams_[arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW] = {
+      .params = params, .stream = std::move(preview_stream),
+  };
+  // TODO(jcliang): Set up context for still capture stream.
+
+  SetState(kStarting);
+  Initialize();
+}
+
+// Initiates shutdown; |closed| is signaled from OnClosed() once the HAL has
+// acknowledged the Close() call. Legal in any state.
+void CameraDeviceDelegate::StopAndDeAllocate(base::WaitableEvent* closed) {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+  // A Mojo connection error leaves |device_ops_| and |callback_ops_|
+  // unbound, and a second stop while one is in flight is a no-op.
+  const bool close_in_flight = (state_ == kStopping);
+  if (close_in_flight || !device_ops_.is_bound()) {
+    return;
+  }
+  SetState(kStopping);
+  device_ops_->Close(base::Bind(&CameraDeviceDelegate::OnClosed, this,
+                                base::Unretained(closed)));
+}
+
+// VideoCaptureDevice photo API entry point. Currently a stub: |callback| is
+// dropped without being run.
+void CameraDeviceDelegate::TakePhoto(
+ VideoCaptureDevice::TakePhotoCallback callback) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ // TODO(jcliang): Implement TakePhoto.
+}
+
+// VideoCaptureDevice photo API entry point. Currently a stub: |callback| is
+// dropped without being run.
+void CameraDeviceDelegate::GetPhotoCapabilities(
+ VideoCaptureDevice::GetPhotoCapabilitiesCallback callback) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ // TODO(jcliang): Implement GetPhotoCapabilities.
+}
+
+// VideoCaptureDevice photo API entry point. Currently a stub: |settings| is
+// ignored and |callback| is dropped without being run.
+void CameraDeviceDelegate::SetPhotoOptions(
+ mojom::PhotoSettingsPtr settings,
+ VideoCaptureDevice::SetPhotoOptionsCallback callback) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ // TODO(jcliang): Implement SetPhotoOptions.
+}
+
+// Records the clockwise rotation applied when frames are delivered to the
+// client in SubmitCaptureResult().
+void CameraDeviceDelegate::SetRotation(int rotation) {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+  // Only the four cardinal rotations are accepted.
+  DCHECK(0 <= rotation && rotation < 360 && rotation % 90 == 0);
+  rotation_ = rotation;
+}
+
+// Transitions the internal state machine. Kept trivial so call sites read as
+// explicit state transitions; all current callers already assert the IPC
+// thread, so no thread DCHECK is repeated here.
+void CameraDeviceDelegate::SetState(State state) {
+ state_ = state;
+}
+
+// Enters the terminal kError state and reports |reason| to the client.
+// NOTE(review): assumes |client_| is non-null — true once AllocateAndStart()
+// has run; verify no error path can reach here before that.
+void CameraDeviceDelegate::SetErrorState(
+ const tracked_objects::Location& from_here,
+ const std::string& reason) {
+ state_ = kError;
+ client_->OnError(from_here, reason);
+}
+
+// Drops both ends of the device IPC: the outgoing device-ops proxy and, if
+// it was ever bound, our incoming callback binding.
+void CameraDeviceDelegate::ResetMojoInterface() {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+  device_ops_.reset();
+  if (!callback_ops_.is_bound()) {
+    return;
+  }
+  callback_ops_.Unbind();
+}
+
+// Connection-error handler shared by |device_ops_| and |callback_ops_|.
+// Tears down both Mojo interfaces and surfaces the failure to the client.
+void CameraDeviceDelegate::OnMojoConnectionError() {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ ResetMojoInterface();
+ SetErrorState(FROM_HERE, "Mojo connection error");
+}
+
+// Completion callback for Camera3DeviceOps::Close(). Finishes the shutdown
+// started in StopAndDeAllocate() and unblocks the thread waiting on
+// |closed|. The HAL close status |result| is ignored: teardown proceeds
+// regardless.
+void CameraDeviceDelegate::OnClosed(base::WaitableEvent* closed,
+ int32_t result) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ ResetMojoInterface();
+ client_.reset();
+ SetState(kStopped);
+ closed->Signal();
+}
+
+// Starts the HAL v3 handshake by handing the HAL our callback interface
+// (ProcessCaptureResult/Notify). Continues in OnInitialized().
+// NOTE(review): the connection error handler on |callback_ops_| is installed
+// after Initialize() has been issued — confirm a racing disconnect cannot be
+// missed in that window.
+void CameraDeviceDelegate::Initialize() {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ DCHECK(state_ == kStarting);
+
+ device_ops_->Initialize(
+ callback_ops_.CreateInterfacePtrAndBind(),
+ base::Bind(&CameraDeviceDelegate::OnInitialized, this));
+ callback_ops_.set_connection_error_handler(
+ base::Bind(&CameraDeviceDelegate::OnMojoConnectionError, this));
+}
+
+// Completion callback for Camera3DeviceOps::Initialize(); on success,
+// advances to kInitialized and configures the streams.
+void CameraDeviceDelegate::OnInitialized(int32_t result) {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+  DCHECK(state_ == kStarting || state_ == kStopping);
+
+  if (state_ == kStopping) {
+    // Shutdown raced with initialization; nothing more to do.
+    return;
+  }
+  if (result != 0) {
+    SetErrorState(FROM_HERE, "Failed to initialize camera device");
+    return;
+  }
+  SetState(kInitialized);
+  ConfigureStreams();
+}
+
+// Sends the set of streams built in AllocateAndStart() to the HAL for
+// configuration; the HAL's (possibly updated) configuration comes back in
+// OnConfiguredStreams().
+void CameraDeviceDelegate::ConfigureStreams() {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+  DCHECK(state_ == kInitialized || state_ == kStopping);
+
+  auto stream_config = arc::mojom::Camera3StreamConfiguration::New();
+  stream_config->num_streams = streams_.size();
+  for (const auto& entry : streams_) {
+    stream_config->streams.push_back(entry.second.stream.Clone());
+  }
+  stream_config->operation_mode = arc::mojom::Camera3StreamConfigurationMode::
+      CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
+  device_ops_->ConfigureStreams(
+      std::move(stream_config),
+      base::Bind(&CameraDeviceDelegate::OnConfiguredStreams, this));
+}
+
+// Completion callback for Camera3DeviceOps::ConfigureStreams(). Records the
+// HAL-updated usage/max_buffers per stream, allocates the shared-memory
+// buffer pool, requests default settings for each stream, and finally
+// notifies the client that capture has started.
+void CameraDeviceDelegate::OnConfiguredStreams(
+    arc::mojom::Camera3StreamConfigurationPtr updated_config) {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+  DCHECK(state_ == kInitialized || state_ == kStopping);
+
+  if (state_ == kStopping) {
+    return;
+  }
+  // Iterate over the streams actually returned rather than trusting
+  // |num_streams| to match the array length.
+  for (const auto& updated_stream : updated_config->streams) {
+    arc::mojom::Camera3RequestTemplate stream_type =
+        static_cast<arc::mojom::Camera3RequestTemplate>(updated_stream->id);
+    StreamContext* stream_context = GetStreamContext(stream_type);
+    if (!stream_context) {
+      // Fixed: previously this |continue|d after SetErrorState and still
+      // called client_->OnStarted() below even though we are now in the
+      // error state. Abort the whole configuration instead.
+      SetErrorState(FROM_HERE, "ConfigureStreams returned invalid stream");
+      return;
+    }
+    // TODO(jcliang): Determine the best format from metadata.
+    VideoCaptureFormat capture_format = stream_context->params.requested_format;
+    capture_format.pixel_format = PIXEL_FORMAT_RGB32;
+    stream_context->capture_format = capture_format;
+    stream_context->stream->usage = updated_stream->usage;
+    stream_context->stream->max_buffers = updated_stream->max_buffers;
+
+    VLOG(2) << "Stream " << updated_stream->id
+            << " configured: usage=" << updated_stream->usage
+            << " max_buffers=" << updated_stream->max_buffers;
+
+    // Allocate one mapped shared-memory buffer per slot the HAL may have in
+    // flight, and mark them all free.
+    size_t num_buffers = stream_context->stream->max_buffers;
+    stream_context->buffers.resize(num_buffers);
+    for (size_t j = 0; j < num_buffers; ++j) {
+      const VideoCaptureFormat frame_format(
+          gfx::Size(stream_context->stream->width,
+                    stream_context->stream->height),
+          0.0, stream_context->capture_format.pixel_format);
+      std::unique_ptr<base::SharedMemory> buffer(new base::SharedMemory());
+      base::SharedMemoryCreateOptions options;
+      options.size = frame_format.ImageAllocationSize();
+      options.share_read_only = false;
+      buffer->Create(options);
+      buffer->Map(buffer->requested_size());
+      stream_context->buffers[j] = std::move(buffer);
+      stream_context->free_buffers.push(j);
+    }
+    VLOG(2) << "Allocated " << stream_context->stream->max_buffers
+            << " buffers for stream " << stream_type;
+
+    // TODO(jcliang): Construct default request settings for still capture.
+    ConstructDefaultRequestSettings(
+        arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
+  }
+
+  client_->OnStarted();
+}
+
+// Asks the HAL for the default request settings of the |stream_type|
+// template; the reply is handled in OnConstructedDefaultRequestSettings().
+void CameraDeviceDelegate::ConstructDefaultRequestSettings(
+ arc::mojom::Camera3RequestTemplate stream_type) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ DCHECK(GetStreamContext(stream_type));
+
+ device_ops_->ConstructDefaultRequestSettings(
+ stream_type,
+ base::Bind(&CameraDeviceDelegate::OnConstructedDefaultRequestSettings,
+ this, stream_type));
+}
+
+// Stores the HAL-provided default request settings for |stream_type| and,
+// for the preview template, starts the capture loop.
+void CameraDeviceDelegate::OnConstructedDefaultRequestSettings(
+ arc::mojom::Camera3RequestTemplate stream_type,
+ arc::mojom::CameraMetadataPtr settings) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ if (state_ == kStopping) {
+ return;
+ }
+ StreamContext* stream_context = GetStreamContext(stream_type);
+ DCHECK(stream_context);
+ stream_context->request_settings = std::move(settings);
+ // TODO(jcliang): Once we have the still capture stream we need to change it
+ // to only SetState when both preview and still capture streams
+ // are configured.
+ SetState(kStreamConfigured);
+ if (stream_type ==
+ arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW) {
+ StartCapture(stream_type);
+ }
+}
+
+// Begins the register-buffer/capture-request loop for |stream_type|.
+void CameraDeviceDelegate::StartCapture(
+    arc::mojom::Camera3RequestTemplate stream_type) {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+  // Reached either right after the streams are configured, or when still
+  // capture starts while the preview capture loop is already running.
+  DCHECK(state_ == kStreamConfigured || state_ == kCapturing ||
+         state_ == kStopping);
+
+  if (state_ == kStopping) {
+    return;
+  }
+  StreamContext* context = GetStreamContext(stream_type);
+  DCHECK(context);
+  DCHECK(!context->request_settings.is_null());
+  SetState(kCapturing);
+  RegisterBuffer(stream_type);
+}
+
+// Takes one buffer from the stream's free list, wraps its shared-memory fd
+// (once per plane) into Mojo handles, and registers it with the HAL.
+// Returns silently when no buffer is free; the loop resumes when
+// SubmitCaptureResult() re-queues a buffer and posts another RegisterBuffer.
+void CameraDeviceDelegate::RegisterBuffer(
+ arc::mojom::Camera3RequestTemplate stream_type) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ DCHECK(state_ == kCapturing || state_ == kStopping);
+
+ if (state_ == kStopping) {
+ return;
+ }
+ StreamContext* stream_context = GetStreamContext(stream_type);
+ DCHECK(stream_context);
+ if (stream_context->free_buffers.empty()) {
+ return;
+ }
+
+ const VideoCaptureParams& params = stream_context->params;
+ const arc::mojom::Camera3StreamPtr& stream = stream_context->stream;
+ size_t buffer_id = stream_context->free_buffers.front();
+ stream_context->free_buffers.pop();
+ const base::SharedMemory* buffer = stream_context->buffers[buffer_id].get();
+
+ VideoPixelFormat buffer_format = stream_context->capture_format.pixel_format;
+ uint32_t drm_format = PixFormatChromiumToDrm(buffer_format);
+ if (!drm_format) {
+ SetErrorState(FROM_HERE, "Unsupported video pixel format");
+ return;
+ }
+ arc::mojom::HalPixelFormat hal_pixel_format = stream->format;
+
+ // All planes live in the single shared-memory region: each plane gets a
+ // duplicate of the same fd, with per-plane strides and cumulative offsets
+ // computed from the plane sizes.
+ size_t num_planes = VideoFrame::NumPlanes(buffer_format);
+ std::vector<mojo::ScopedHandle> fds(num_planes);
+ std::vector<uint32_t> strides(num_planes);
+ std::vector<uint32_t> offsets(num_planes);
+ for (size_t i = 0; i < num_planes; ++i) {
+ base::SharedMemoryHandle shm_handle = buffer->handle();
+ // Wrap the platform handle.
+ MojoHandle wrapped_handle;
+ MojoResult result = mojo::edk::CreatePlatformHandleWrapper(
+ mojo::edk::ScopedPlatformHandle(mojo::edk::PlatformHandle(
+ base::SharedMemory::DuplicateHandle(shm_handle).fd)),
+ &wrapped_handle);
+ if (result != MOJO_RESULT_OK) {
+ SetErrorState(FROM_HERE, "Failed to wrap shared memory handle");
+ return;
+ }
+ fds[i].reset(mojo::Handle(wrapped_handle));
+ strides[i] = VideoFrame::RowBytes(i, buffer_format, stream->width);
+ if (!i) {
+ offsets[i] = 0;
+ } else {
+ // Each plane starts right after the previous one.
+ offsets[i] = offsets[i - 1] +
+ VideoFrame::PlaneSize(buffer_format, i,
+ params.requested_format.frame_size)
+ .GetArea();
+ }
+ }
+ device_ops_->RegisterBuffer(
+ buffer_id, arc::mojom::Camera3DeviceOps::BufferType::SHM, std::move(fds),
+ drm_format, hal_pixel_format, stream_context->stream->width,
+ stream_context->stream->height, std::move(strides), std::move(offsets),
+ base::Bind(&CameraDeviceDelegate::OnRegisteredBuffer, this, stream_type,
+ buffer_id));
+ VLOG(2) << "Registered buffer " << buffer_id << " of stream " << stream_type;
+}
+
+// Completion callback for Camera3DeviceOps::RegisterBuffer(); on success the
+// buffer is known to the HAL and a capture request is issued to fill it.
+void CameraDeviceDelegate::OnRegisteredBuffer(
+    arc::mojom::Camera3RequestTemplate stream_type,
+    size_t buffer_index,
+    int32_t result) {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+  DCHECK(state_ == kCapturing || state_ == kStopping);
+
+  if (state_ == kStopping) {
+    return;
+  }
+  if (result != 0) {
+    SetErrorState(FROM_HERE, "Failed to register buffer");
+    return;
+  }
+  ProcessCaptureRequest(stream_type, buffer_index);
+}
+
+// Builds and submits a capture request for frame |frame_number_| that fills
+// buffer |buffer_index| of |stream_type|, then bumps the frame counter.
+void CameraDeviceDelegate::ProcessCaptureRequest(
+ arc::mojom::Camera3RequestTemplate stream_type,
+ size_t buffer_index) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ DCHECK(state_ == kCapturing || state_ == kStopping);
+
+ StreamContext* stream_context = GetStreamContext(stream_type);
+ DCHECK(stream_context);
+
+ arc::mojom::Camera3StreamBufferPtr buffer =
+ arc::mojom::Camera3StreamBuffer::New();
+ // Stream id is hard-coded to the preview template for now; see TODO below.
+ buffer->stream_id = static_cast<uint64_t>(
+ arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
+ buffer->buffer_id = buffer_index;
+ buffer->status = arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK;
+
+ // TODO(jcliang): Also process still capture buffers after we enabled still
+ // capture stream.
+ arc::mojom::Camera3CaptureRequestPtr request =
+ arc::mojom::Camera3CaptureRequest::New();
+ request->frame_number = frame_number_;
+ request->settings = stream_context->request_settings.Clone();
+ request->num_output_buffers = 1;
+ request->output_buffers.push_back(std::move(buffer));
+
+ device_ops_->ProcessCaptureRequest(
+ std::move(request),
+ base::Bind(&CameraDeviceDelegate::OnProcessedCaptureRequest, this,
+ stream_type));
+ VLOG(2) << "Requested capture for frame " << frame_number_ << " with buffer "
+ << buffer_index << " of stream " << stream_type;
+ frame_number_++;
+}
+
+// Completion callback for Camera3DeviceOps::ProcessCaptureRequest(); on
+// success, keeps the pipeline full by registering the next free buffer.
+void CameraDeviceDelegate::OnProcessedCaptureRequest(
+    arc::mojom::Camera3RequestTemplate stream_type,
+    int32_t result) {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+  DCHECK(state_ == kCapturing || state_ == kStopping);
+
+  if (state_ == kStopping) {
+    return;
+  }
+  if (result != 0) {
+    SetErrorState(FROM_HERE, "Process capture request failed");
+    return;
+  }
+  RegisterBuffer(stream_type);
+}
+
+// arc::mojom::Camera3CallbackOps implementation. Accumulates the (possibly
+// partial) capture result for |result->frame_number|: result buffers and
+// merged metadata. Once all |partial_result_count_| stages and the shutter
+// notification have arrived, the frame is submitted to the client.
+void CameraDeviceDelegate::ProcessCaptureResult(
+    arc::mojom::Camera3CaptureResultPtr result) {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+  uint32_t frame_number = result->frame_number;
+  CaptureResult& partial_result = partial_results_[frame_number];
+  // Fixed per review: iterate over the buffers actually present instead of
+  // trusting |num_output_buffers| — a count larger than the array would have
+  // read out of bounds, and the field is redundant with the array size.
+  if (result->output_buffers) {
+    for (auto& stream_buffer : result->output_buffers.value()) {
+      arc::mojom::Camera3RequestTemplate stream_type =
+          static_cast<arc::mojom::Camera3RequestTemplate>(
+              stream_buffer->stream_id);
+      // The camera HAL v3 API specifies that only one capture result can
+      // carry the result buffer for any given frame number.
+      if (partial_result.buffers.find(stream_type) !=
+          partial_result.buffers.end()) {
+        client_->OnLog(
+            std::string("Received multiple result buffers for frame ") +
+            std::to_string(frame_number));
+        continue;
+      }
+      if (stream_buffer->status ==
+          arc::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
+        // TODO(jcliang): Discard buffer and continue maybe?
+        SetErrorState(FROM_HERE, "HAL encountered error while filling buffer");
+        return;
+      }
+      partial_result.buffers[stream_type] = std::move(stream_buffer);
+    }
+  }
+
+  // |result->partial_result| is set to 0 if the capture result contains only
+  // the result buffer handles and no result metadata.
+  if (result->partial_result) {
+    partial_result.partial_stage = result->partial_result;
+    MergeMetadata(&partial_result.metadata, result->result);
+  }
+
+  if (partial_result.partial_stage == partial_result_count_) {
+    // This is the last capture result for this frame number. We can only
+    // submit the result buffer after we receive the shutter time in Notify().
+    if (partial_result.reference_time != base::TimeTicks()) {
+      SubmitCaptureResult(frame_number);
+    }
+  }
+}
+
+// arc::mojom::Camera3CallbackOps implementation. Handles asynchronous HAL
+// notifications; currently only shutter messages are processed, recording
+// the per-frame reference time and a timestamp relative to the first frame.
+void CameraDeviceDelegate::Notify(arc::mojom::Camera3NotifyMsgPtr message) {
+ // TODO(jcliang): unit tests.
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ if (message->type == arc::mojom::Camera3MsgType::CAMERA3_MSG_ERROR) {
+ // TODO(jcliang): Handle error notify.
+ } else { // arc::mojom::Camera3MsgType::CAMERA3_MSG_SHUTTER
+ uint32_t frame_number = message->message->get_shutter()->frame_number;
+ uint64_t shutter_time = message->message->get_shutter()->timestamp;
+ CaptureResult& partial_result = partial_results_[frame_number];
+ // Shutter timestamp is in ns; TimeTicks internal value is in us, hence
+ // the division by 1000.
+ base::TimeTicks reference_time =
+ base::TimeTicks::FromInternalValue(shutter_time / 1000);
+ partial_result.reference_time = reference_time;
+ if (!frame_number) {
+ // Record the shutter time of the first frame for calculating the
+ // timestamp.
+ first_frame_shutter_time_ = reference_time;
+ partial_result.timestamp = base::TimeDelta::FromMicroseconds(0);
+ } else {
+ partial_result.timestamp = reference_time - first_frame_shutter_time_;
+ }
+ // If all metadata stages already arrived, the shutter time was the last
+ // missing piece; submit the frame now.
+ if (partial_result.partial_stage == partial_result_count_) {
+ SubmitCaptureResult(frame_number);
+ }
+ }
+}
+
+// Delivers the completed capture result for |frame_number| to the client:
+// waits on each buffer's release fence, hands preview pixels to the client,
+// re-queues the buffer, and drops the bookkeeping entry.
+void CameraDeviceDelegate::SubmitCaptureResult(uint32_t frame_number) {
+  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+  // Frames must complete in order; |partial_results_| is ordered by frame
+  // number, so its first entry is the oldest outstanding frame.
+  if (partial_results_.begin()->first != frame_number) {
+    SetErrorState(FROM_HERE, "Received out-of-order frames from HAL");
+    return;
+  }
+
+  CaptureResult& partial_result = partial_results_[frame_number];
+  DCHECK_EQ(partial_result.partial_stage, partial_result_count_);
+  for (const auto& it : partial_result.buffers) {
+    arc::mojom::Camera3RequestTemplate stream_type = it.first;
+    StreamContext* stream_context = GetStreamContext(stream_type);
+    const arc::mojom::Camera3StreamBufferPtr& buffer = it.second;
+    uint32_t buffer_id = buffer->buffer_id;
+
+    // Wait on release fence before delivering the result buffer to client.
+    if (buffer->release_fence.is_valid()) {
+      const int kSyncWaitTimeoutMs = 1000;
+      mojo::edk::ScopedPlatformHandle fence;
+      MojoResult result = mojo::edk::PassWrappedPlatformHandle(
+          buffer->release_fence.release().value(), &fence);
+      if (result != MOJO_RESULT_OK) {
+        SetErrorState(FROM_HERE, "Failed to unwrap release fence fd");
+        return;
+      }
+      // Fixed: sync_wait() returns 0 on success and a negative value on
+      // error/timeout; the previous |!sync_wait(...)| treated a successful
+      // wait as a timeout and errored out on every fenced buffer.
+      if (sync_wait(fence.get().handle, kSyncWaitTimeoutMs) < 0) {
+        SetErrorState(FROM_HERE, "Sync wait on release fence timed out");
+        return;
+      }
+    }
+
+    if (stream_type ==
+        arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW) {
+      // Deliver the captured data to client and then re-queue the buffer.
+      // (|shm| was previously named |buffer|, shadowing the stream buffer
+      // declared above.)
+      const base::SharedMemory* shm = stream_context->buffers[buffer_id].get();
+      client_->OnIncomingCapturedData(
+          reinterpret_cast<uint8_t*>(shm->memory()), shm->mapped_size(),
+          stream_context->capture_format, rotation_,
+          partial_result.reference_time, partial_result.timestamp);
+      stream_context->free_buffers.push(buffer_id);
+      ipc_task_runner_->PostTask(
+          FROM_HERE,
+          base::Bind(
+              &CameraDeviceDelegate::RegisterBuffer, this,
+              arc::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW));
+    }
+    // TODO(jcliang): Handle still capture result for TakePhoto.
+  }
+  partial_results_.erase(frame_number);
+}
+
+} // namespace media

Powered by Google App Engine
This is Rietveld 408576698