Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(367)

Side by Side Diff: media/capture/video/android/video_capture_device_android.cc

Issue 2983473002: Android Tango depth camera capture support.
Patch Set: Created 3 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/capture/video/android/video_capture_device_android.h" 5 #include "media/capture/video/android/video_capture_device_android.h"
6 6
7 #include <stdint.h> 7 #include <stdint.h>
8 #include <utility> 8 #include <utility>
9 9
10 #include "base/android/jni_android.h" 10 #include "base/android/jni_android.h"
(...skipping 244 matching lines...) Expand 10 before | Expand all | Expand 10 after
255 } 255 }
256 DoSetPhotoOptions(std::move(settings), std::move(callback)); 256 DoSetPhotoOptions(std::move(settings), std::move(callback));
257 } 257 }
258 258
259 void VideoCaptureDeviceAndroid::OnFrameAvailable( 259 void VideoCaptureDeviceAndroid::OnFrameAvailable(
260 JNIEnv* env, 260 JNIEnv* env,
261 const JavaParamRef<jobject>& obj, 261 const JavaParamRef<jobject>& obj,
262 const JavaParamRef<jbyteArray>& data, 262 const JavaParamRef<jbyteArray>& data,
263 jint length, 263 jint length,
264 jint rotation) { 264 jint rotation) {
265 { 265 if (!IsClientConfiguredForIncomingData())
266 base::AutoLock lock(lock_); 266 return;
267 if (state_ != kConfigured || !client_)
268 return;
269 }
270
271 jbyte* buffer = env->GetByteArrayElements(data, NULL); 267 jbyte* buffer = env->GetByteArrayElements(data, NULL);
272 if (!buffer) { 268 if (!buffer) {
273 LOG(ERROR) << "VideoCaptureDeviceAndroid::OnFrameAvailable: " 269 LOG(ERROR) << "VideoCaptureDeviceAndroid::OnFrameAvailable: "
274 "failed to GetByteArrayElements"; 270 "failed to GetByteArrayElements";
275 return; 271 return;
276 } 272 }
277 273
278 const base::TimeTicks current_time = base::TimeTicks::Now(); 274 const base::TimeTicks current_time = base::TimeTicks::Now();
279 { 275 ProcessFirstFrameAvailable(current_time);
280 base::AutoLock lock(lock_);
281 if (!got_first_frame_) {
282 // Set aside one frame allowance for fluctuation.
283 expected_next_frame_time_ = current_time - frame_interval_;
284 got_first_frame_ = true;
285
286 for (const auto& request : photo_requests_queue_)
287 main_task_runner_->PostTask(FROM_HERE, request);
288 photo_requests_queue_.clear();
289 }
290 }
291 276
292 // Deliver the frame when it doesn't arrive too early. 277 // Deliver the frame when it doesn't arrive too early.
293 if (expected_next_frame_time_ <= current_time) { 278 if (expected_next_frame_time_ <= current_time) {
294 // Using |expected_next_frame_time_| to estimate a proper capture timestamp 279 // Using |expected_next_frame_time_| to estimate a proper capture timestamp
295 // since android.hardware.Camera API doesn't expose a better timestamp. 280 // since android.hardware.Camera API doesn't expose a better timestamp.
296 const base::TimeDelta capture_time = 281 const base::TimeDelta capture_time =
297 expected_next_frame_time_ - base::TimeTicks(); 282 expected_next_frame_time_ - base::TimeTicks();
298 283
299 expected_next_frame_time_ += frame_interval_; 284 expected_next_frame_time_ += frame_interval_;
300 285
301 // TODO(qiangchen): Investigate how to get raw timestamp for Android, 286 // TODO(qiangchen): Investigate how to get raw timestamp for Android,
302 // rather than using reference time to calculate timestamp. 287 // rather than using reference time to calculate timestamp.
303 base::AutoLock lock(lock_); 288 SendIncomingDataToClient(reinterpret_cast<uint8_t*>(buffer), length,
304 if (!client_) 289 rotation, current_time, capture_time);
305 return;
306 client_->OnIncomingCapturedData(reinterpret_cast<uint8_t*>(buffer), length,
307 capture_format_, rotation, current_time,
308 capture_time);
309 } 290 }
310 291
311 env->ReleaseByteArrayElements(data, buffer, JNI_ABORT); 292 env->ReleaseByteArrayElements(data, buffer, JNI_ABORT);
312 } 293 }
313 294
314 void VideoCaptureDeviceAndroid::OnI420FrameAvailable(JNIEnv* env, 295 void VideoCaptureDeviceAndroid::OnI420FrameAvailable(JNIEnv* env,
315 jobject obj, 296 jobject obj,
316 jobject y_buffer, 297 jobject y_buffer,
317 jint y_stride, 298 jint y_stride,
318 jobject u_buffer, 299 jobject u_buffer,
319 jobject v_buffer, 300 jobject v_buffer,
320 jint uv_row_stride, 301 jint uv_row_stride,
321 jint uv_pixel_stride, 302 jint uv_pixel_stride,
322 jint width, 303 jint width,
323 jint height, 304 jint height,
324 jint rotation, 305 jint rotation,
325 jlong timestamp) { 306 jlong timestamp) {
326 { 307 if (!IsClientConfiguredForIncomingData())
327 base::AutoLock lock(lock_); 308 return;
328 if (state_ != kConfigured || !client_)
329 return;
330 }
331 const int64_t absolute_micro = 309 const int64_t absolute_micro =
332 timestamp / base::Time::kNanosecondsPerMicrosecond; 310 timestamp / base::Time::kNanosecondsPerMicrosecond;
333 const base::TimeDelta capture_time = 311 const base::TimeDelta capture_time =
334 base::TimeDelta::FromMicroseconds(absolute_micro); 312 base::TimeDelta::FromMicroseconds(absolute_micro);
335 313
336 const base::TimeTicks current_time = base::TimeTicks::Now(); 314 const base::TimeTicks current_time = base::TimeTicks::Now();
337 { 315 ProcessFirstFrameAvailable(current_time);
338 base::AutoLock lock(lock_);
339 if (!got_first_frame_) {
340 // Set aside one frame allowance for fluctuation.
341 expected_next_frame_time_ = current_time - frame_interval_;
342 got_first_frame_ = true;
343
344 for (const auto& request : photo_requests_queue_)
345 main_task_runner_->PostTask(FROM_HERE, request);
346 photo_requests_queue_.clear();
347 }
348 }
349 316
350 uint8_t* const y_src = 317 uint8_t* const y_src =
351 reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(y_buffer)); 318 reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(y_buffer));
352 CHECK(y_src); 319 CHECK(y_src);
353 uint8_t* const u_src = 320 uint8_t* const u_src =
354 reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(u_buffer)); 321 reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(u_buffer));
355 CHECK(u_src); 322 CHECK(u_src);
356 uint8_t* const v_src = 323 uint8_t* const v_src =
357 reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(v_buffer)); 324 reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(v_buffer));
358 CHECK(v_src); 325 CHECK(v_src);
359 326
360 const int y_plane_length = width * height; 327 const int y_plane_length = width * height;
361 const int uv_plane_length = y_plane_length / 4; 328 const int uv_plane_length = y_plane_length / 4;
362 const int buffer_length = y_plane_length + uv_plane_length * 2; 329 const int buffer_length = y_plane_length + uv_plane_length * 2;
363 std::unique_ptr<uint8_t> buffer(new uint8_t[buffer_length]); 330 std::unique_ptr<uint8_t> buffer(new uint8_t[buffer_length]);
364 331
365 libyuv::Android420ToI420(y_src, y_stride, u_src, uv_row_stride, v_src, 332 libyuv::Android420ToI420(y_src, y_stride, u_src, uv_row_stride, v_src,
366 uv_row_stride, uv_pixel_stride, buffer.get(), width, 333 uv_row_stride, uv_pixel_stride, buffer.get(), width,
367 buffer.get() + y_plane_length, width / 2, 334 buffer.get() + y_plane_length, width / 2,
368 buffer.get() + y_plane_length + uv_plane_length, 335 buffer.get() + y_plane_length + uv_plane_length,
369 width / 2, width, height); 336 width / 2, width, height);
370 337
371 // Deliver the frame when it doesn't arrive too early. 338 // Deliver the frame when it doesn't arrive too early.
372 if (expected_next_frame_time_ <= current_time) { 339 if (AdvanceToNextFrameTime(current_time)) {
373 expected_next_frame_time_ += frame_interval_; 340 SendIncomingDataToClient(buffer.get(), buffer_length, rotation,
341 current_time, capture_time);
342 }
343 }
374 344
375 // TODO(qiangchen): Investigate how to get raw timestamp for Android, 345 void VideoCaptureDeviceAndroid::OnPointCloudAvailable(JNIEnv* env,
376 // rather than using reference time to calculate timestamp. 346 jobject obj,
377 base::AutoLock lock(lock_); 347 jobject points_buffer,
378 if (!client_) 348 jint num_points,
379 return; 349 jdouble timestamp) {
380 client_->OnIncomingCapturedData(buffer.get(), buffer_length, 350 NOTREACHED() << "Only Tango superclasses implement this.";
381 capture_format_, rotation, current_time,
382 capture_time);
383 }
384 } 351 }
385 352
// Called by the Java side when the capture pipeline reports an error.
// Converts the Java string to UTF-8 and routes it through the common
// SetErrorState() path.
void VideoCaptureDeviceAndroid::OnError(JNIEnv* env,
                                        const JavaParamRef<jobject>& obj,
                                        const JavaParamRef<jstring>& message) {
  SetErrorState(FROM_HERE,
                base::android::ConvertJavaStringToUTF8(env, message));
}
392 359
393 void VideoCaptureDeviceAndroid::OnPhotoTaken( 360 void VideoCaptureDeviceAndroid::OnPhotoTaken(
(...skipping 29 matching lines...) Expand all
// Called by the Java side once capture has actually started; forwards the
// notification to the client if one is attached.
void VideoCaptureDeviceAndroid::OnStarted(JNIEnv* env,
                                          const JavaParamRef<jobject>& obj) {
  // NOTE(review): |client_| is read here without taking |lock_|, unlike the
  // other JNI callbacks in this file — confirm this cannot race teardown.
  if (client_)
    client_->OnStarted();
}
428 395
// Switches the Java capture object into its test mode via
// Java_VideoCapture_setTestMode().
void VideoCaptureDeviceAndroid::ConfigureForTesting() {
  Java_VideoCapture_setTestMode(AttachCurrentThread(), j_capture_);
}
432 399
400 void VideoCaptureDeviceAndroid::ProcessFirstFrameAvailable(
401 base::TimeTicks current_time) {
402 base::AutoLock lock(lock_);
403 if (!got_first_frame_) {
mcasas 2017/07/15 00:12:36 if (got_first_frame) return;
404 // Set aside one frame allowance for fluctuation.
405 expected_next_frame_time_ = current_time - frame_interval_;
406 got_first_frame_ = true;
407
408 for (const auto& request : photo_requests_queue_)
409 main_task_runner_->PostTask(FROM_HERE, request);
410 photo_requests_queue_.clear();
411 }
412 }
413
414 bool VideoCaptureDeviceAndroid::IsClientConfiguredForIncomingData() {
415 base::AutoLock lock(lock_);
416 return (state_ == kConfigured && client_);
417 }
418
419 bool VideoCaptureDeviceAndroid::AdvanceToNextFrameTime(
420 base::TimeTicks current_time) {
421 if (expected_next_frame_time_ > current_time)
422 return false;
423 expected_next_frame_time_ += frame_interval_;
424 return true;
425 }
426
427 void VideoCaptureDeviceAndroid::SendIncomingDataToClient(
428 const uint8_t* data,
429 int length,
430 int rotation,
431 base::TimeTicks reference_time,
432 base::TimeDelta timestamp) {
433 base::AutoLock lock(lock_);
434 if (!client_)
435 return;
436 client_->OnIncomingCapturedData(data, length, capture_format_, rotation,
437 reference_time, timestamp);
438 }
439
// Queries the Java capture object for its current colorspace and maps the
// Android image-format constant onto the corresponding media pixel format.
// Unrecognized formats map to PIXEL_FORMAT_UNKNOWN.
VideoPixelFormat VideoCaptureDeviceAndroid::GetColorspace() {
  JNIEnv* env = AttachCurrentThread();
  const int current_capture_colorspace =
      Java_VideoCapture_getColorspace(env, j_capture_);
  switch (current_capture_colorspace) {
    case ANDROID_IMAGE_FORMAT_YV12:
      return media::PIXEL_FORMAT_YV12;
    case ANDROID_IMAGE_FORMAT_YUV_420_888:
      return media::PIXEL_FORMAT_I420;
    case ANDROID_IMAGE_FORMAT_NV21:
      return media::PIXEL_FORMAT_NV21;
    // DEPTH16 (depth capture) is surfaced as the 16-bit single-plane Y16
    // media format.
    case ANDROID_IMAGE_FORMAT_DEPTH16:
      return media::PIXEL_FORMAT_Y16;
    case ANDROID_IMAGE_FORMAT_UNKNOWN:
    default:
      return media::PIXEL_FORMAT_UNKNOWN;
  }
}
449 458
450 void VideoCaptureDeviceAndroid::SetErrorState( 459 void VideoCaptureDeviceAndroid::SetErrorState(
451 const tracked_objects::Location& from_here, 460 const tracked_objects::Location& from_here,
452 const std::string& reason) { 461 const std::string& reason) {
453 { 462 {
(...skipping 190 matching lines...) Expand 10 before | Expand all | Expand 10 after
644 settings->has_exposure_compensation, exposure_compensation, 653 settings->has_exposure_compensation, exposure_compensation,
645 static_cast<int>(white_balance_mode), iso, 654 static_cast<int>(white_balance_mode), iso,
646 settings->has_red_eye_reduction, settings->red_eye_reduction, 655 settings->has_red_eye_reduction, settings->red_eye_reduction,
647 static_cast<int>(fill_light_mode), settings->has_torch, settings->torch, 656 static_cast<int>(fill_light_mode), settings->has_torch, settings->torch,
648 color_temperature); 657 color_temperature);
649 658
650 callback.Run(true); 659 callback.Run(true);
651 } 660 }
652 661
653 } // namespace media 662 } // namespace media
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698