Chromium Code Reviews — | Index: media/capture/video/android/video_capture_device_android.cc |
| diff --git a/media/capture/video/android/video_capture_device_android.cc b/media/capture/video/android/video_capture_device_android.cc |
| index 826f8c5b8bbf8e2c4f9c7735a438001cc37dda70..97bacdef41d4da61b04501438c4974d0fa9807a5 100644 |
| --- a/media/capture/video/android/video_capture_device_android.cc |
| +++ b/media/capture/video/android/video_capture_device_android.cc |
| @@ -262,12 +262,8 @@ void VideoCaptureDeviceAndroid::OnFrameAvailable( |
| const JavaParamRef<jbyteArray>& data, |
| jint length, |
| jint rotation) { |
| - { |
| - base::AutoLock lock(lock_); |
| - if (state_ != kConfigured || !client_) |
| - return; |
| - } |
| - |
| + if (!IsClientConfiguredForIncomingData()) |
| + return; |
| jbyte* buffer = env->GetByteArrayElements(data, NULL); |
| if (!buffer) { |
| LOG(ERROR) << "VideoCaptureDeviceAndroid::OnFrameAvailable: " |
| @@ -276,18 +272,7 @@ void VideoCaptureDeviceAndroid::OnFrameAvailable( |
| } |
| const base::TimeTicks current_time = base::TimeTicks::Now(); |
| - { |
| - base::AutoLock lock(lock_); |
| - if (!got_first_frame_) { |
| - // Set aside one frame allowance for fluctuation. |
| - expected_next_frame_time_ = current_time - frame_interval_; |
| - got_first_frame_ = true; |
| - |
| - for (const auto& request : photo_requests_queue_) |
| - main_task_runner_->PostTask(FROM_HERE, request); |
| - photo_requests_queue_.clear(); |
| - } |
| - } |
| + ProcessFirstFrameAvailable(current_time); |
| // Deliver the frame when it doesn't arrive too early. |
| if (expected_next_frame_time_ <= current_time) { |
| @@ -300,12 +285,8 @@ void VideoCaptureDeviceAndroid::OnFrameAvailable( |
| // TODO(qiangchen): Investigate how to get raw timestamp for Android, |
| // rather than using reference time to calculate timestamp. |
| - base::AutoLock lock(lock_); |
| - if (!client_) |
| - return; |
| - client_->OnIncomingCapturedData(reinterpret_cast<uint8_t*>(buffer), length, |
| - capture_format_, rotation, current_time, |
| - capture_time); |
| + SendIncomingDataToClient(reinterpret_cast<uint8_t*>(buffer), length, |
| + rotation, current_time, capture_time); |
| } |
| env->ReleaseByteArrayElements(data, buffer, JNI_ABORT); |
| @@ -323,29 +304,15 @@ void VideoCaptureDeviceAndroid::OnI420FrameAvailable(JNIEnv* env, |
| jint height, |
| jint rotation, |
| jlong timestamp) { |
| - { |
| - base::AutoLock lock(lock_); |
| - if (state_ != kConfigured || !client_) |
| - return; |
| - } |
| + if (!IsClientConfiguredForIncomingData()) |
| + return; |
| const int64_t absolute_micro = |
| timestamp / base::Time::kNanosecondsPerMicrosecond; |
| const base::TimeDelta capture_time = |
| base::TimeDelta::FromMicroseconds(absolute_micro); |
| const base::TimeTicks current_time = base::TimeTicks::Now(); |
| - { |
| - base::AutoLock lock(lock_); |
| - if (!got_first_frame_) { |
| - // Set aside one frame allowance for fluctuation. |
| - expected_next_frame_time_ = current_time - frame_interval_; |
| - got_first_frame_ = true; |
| - |
| - for (const auto& request : photo_requests_queue_) |
| - main_task_runner_->PostTask(FROM_HERE, request); |
| - photo_requests_queue_.clear(); |
| - } |
| - } |
| + ProcessFirstFrameAvailable(current_time); |
| uint8_t* const y_src = |
| reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(y_buffer)); |
| @@ -369,20 +336,20 @@ void VideoCaptureDeviceAndroid::OnI420FrameAvailable(JNIEnv* env, |
| width / 2, width, height); |
| // Deliver the frame when it doesn't arrive too early. |
| - if (expected_next_frame_time_ <= current_time) { |
| - expected_next_frame_time_ += frame_interval_; |
| - |
| - // TODO(qiangchen): Investigate how to get raw timestamp for Android, |
| - // rather than using reference time to calculate timestamp. |
| - base::AutoLock lock(lock_); |
| - if (!client_) |
| - return; |
| - client_->OnIncomingCapturedData(buffer.get(), buffer_length, |
| - capture_format_, rotation, current_time, |
| - capture_time); |
| + if (AdvanceToNextFrameTime(current_time)) { |
| + SendIncomingDataToClient(buffer.get(), buffer_length, rotation, |
| + current_time, capture_time); |
| } |
| } |
| +void VideoCaptureDeviceAndroid::OnPointCloudAvailable(JNIEnv* env, |
| + jobject obj, |
| + jobject points_buffer, |
| + jint num_points, |
| + jdouble timestamp) { |
| + NOTREACHED() << "Only Tango superclasses implement this."; |
| +} |
| + |
| void VideoCaptureDeviceAndroid::OnError(JNIEnv* env, |
| const JavaParamRef<jobject>& obj, |
| const JavaParamRef<jstring>& message) { |
| @@ -430,6 +397,46 @@ void VideoCaptureDeviceAndroid::ConfigureForTesting() { |
| Java_VideoCapture_setTestMode(AttachCurrentThread(), j_capture_); |
| } |
| +void VideoCaptureDeviceAndroid::ProcessFirstFrameAvailable( |
| + base::TimeTicks current_time) { |
| + base::AutoLock lock(lock_); |
| + if (!got_first_frame_) { |
| [review comment — mcasas, 2017/07/15 00:12:36: prefer an early-return guard here instead of wrapping the body in the conditional: |
|     if (got_first_frame_) |
|       return; |
| ] |
| + // Set aside one frame allowance for fluctuation. |
| + expected_next_frame_time_ = current_time - frame_interval_; |
| + got_first_frame_ = true; |
| + |
| + for (const auto& request : photo_requests_queue_) |
| + main_task_runner_->PostTask(FROM_HERE, request); |
| + photo_requests_queue_.clear(); |
| + } |
| +} |
| + |
| +bool VideoCaptureDeviceAndroid::IsClientConfiguredForIncomingData() { |
| + base::AutoLock lock(lock_); |
| + return (state_ == kConfigured && client_); |
| +} |
| + |
| +bool VideoCaptureDeviceAndroid::AdvanceToNextFrameTime( |
| + base::TimeTicks current_time) { |
| + if (expected_next_frame_time_ > current_time) |
| + return false; |
| + expected_next_frame_time_ += frame_interval_; |
| + return true; |
| +} |
| + |
| +void VideoCaptureDeviceAndroid::SendIncomingDataToClient( |
| + const uint8_t* data, |
| + int length, |
| + int rotation, |
| + base::TimeTicks reference_time, |
| + base::TimeDelta timestamp) { |
| + base::AutoLock lock(lock_); |
| + if (!client_) |
| + return; |
| + client_->OnIncomingCapturedData(data, length, capture_format_, rotation, |
| + reference_time, timestamp); |
| +} |
| + |
| VideoPixelFormat VideoCaptureDeviceAndroid::GetColorspace() { |
| JNIEnv* env = AttachCurrentThread(); |
| const int current_capture_colorspace = |
| @@ -441,6 +448,8 @@ VideoPixelFormat VideoCaptureDeviceAndroid::GetColorspace() { |
| return media::PIXEL_FORMAT_I420; |
| case ANDROID_IMAGE_FORMAT_NV21: |
| return media::PIXEL_FORMAT_NV21; |
| + case ANDROID_IMAGE_FORMAT_DEPTH16: |
| + return media::PIXEL_FORMAT_Y16; |
| case ANDROID_IMAGE_FORMAT_UNKNOWN: |
| default: |
| return media::PIXEL_FORMAT_UNKNOWN; |