Index: trunk/src/media/video/capture/android/video_capture_device_android.cc
===================================================================
--- trunk/src/media/video/capture/android/video_capture_device_android.cc (revision 236934)
+++ trunk/src/media/video/capture/android/video_capture_device_android.cc (working copy)
@@ -86,7 +86,11 @@
 }
 
 VideoCaptureDeviceAndroid::VideoCaptureDeviceAndroid(const Name& device_name)
-    : state_(kIdle), got_first_frame_(false), device_name_(device_name) {}
+    : state_(kIdle),
+      got_first_frame_(false),
+      device_name_(device_name),
+      current_settings_() {
+}
 
 VideoCaptureDeviceAndroid::~VideoCaptureDeviceAndroid() {
   StopAndDeAllocate();
@@ -107,7 +111,7 @@
 }
 
 void VideoCaptureDeviceAndroid::AllocateAndStart(
-    const VideoCaptureParams& params,
+    const VideoCaptureCapability& capture_format,
     scoped_ptr<Client> client) {
   DVLOG(1) << "VideoCaptureDeviceAndroid::AllocateAndStart";
   {
@@ -120,38 +124,40 @@
 
   JNIEnv* env = AttachCurrentThread();
 
-  jboolean ret =
-      Java_VideoCapture_allocate(env,
-                                 j_capture_.obj(),
-                                 params.requested_format.frame_size.width(),
-                                 params.requested_format.frame_size.height(),
-                                 params.requested_format.frame_rate);
+  jboolean ret = Java_VideoCapture_allocate(env,
+                                            j_capture_.obj(),
+                                            capture_format.width,
+                                            capture_format.height,
+                                            capture_format.frame_rate);
   if (!ret) {
     SetErrorState("failed to allocate");
     return;
   }
 
   // Store current width and height.
-  capture_format_.frame_size.SetSize(
-      Java_VideoCapture_queryWidth(env, j_capture_.obj()),
-      Java_VideoCapture_queryHeight(env, j_capture_.obj()));
-  capture_format_.frame_rate =
+  current_settings_.width =
+      Java_VideoCapture_queryWidth(env, j_capture_.obj());
+  current_settings_.height =
+      Java_VideoCapture_queryHeight(env, j_capture_.obj());
+  current_settings_.frame_rate =
       Java_VideoCapture_queryFrameRate(env, j_capture_.obj());
-  capture_format_.pixel_format = GetColorspace();
-  DCHECK_NE(capture_format_.pixel_format, media::PIXEL_FORMAT_UNKNOWN);
-  CHECK(capture_format_.frame_size.GetArea() > 0);
-  CHECK(!(capture_format_.frame_size.width() % 2));
-  CHECK(!(capture_format_.frame_size.height() % 2));
+  current_settings_.color = GetColorspace();
+  DCHECK_NE(current_settings_.color, media::PIXEL_FORMAT_UNKNOWN);
+  CHECK(current_settings_.width > 0 && !(current_settings_.width % 2));
+  CHECK(current_settings_.height > 0 && !(current_settings_.height % 2));
 
-  if (capture_format_.frame_rate > 0) {
+  if (capture_format.frame_rate > 0) {
     frame_interval_ = base::TimeDelta::FromMicroseconds(
-        (base::Time::kMicrosecondsPerSecond + capture_format_.frame_rate - 1) /
-        capture_format_.frame_rate);
+        (base::Time::kMicrosecondsPerSecond + capture_format.frame_rate - 1) /
+        capture_format.frame_rate);
   }
 
-  DVLOG(1) << "VideoCaptureDeviceAndroid::Allocate: queried frame_size="
-           << capture_format_.frame_size.ToString()
-           << ", frame_rate=" << capture_format_.frame_rate;
+  DVLOG(1) << "VideoCaptureDeviceAndroid::Allocate: queried width="
+           << current_settings_.width
+           << ", height="
+           << current_settings_.height
+           << ", frame_rate="
+           << current_settings_.frame_rate;
 
   jint result = Java_VideoCapture_startCapture(env, j_capture_.obj());
   if (result < 0) {
@@ -228,7 +234,7 @@
                                      rotation,
                                      flip_vert,
                                      flip_horiz,
-                                     capture_format_);
+                                     current_settings_);
   }
 
   env->ReleaseByteArrayElements(data, buffer, JNI_ABORT);
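
Note (illustrative, not part of the patch): the frame-interval computation in the AllocateAndStart() hunk above rounds one second's worth of microseconds up to a whole number of microseconds per frame. The minimal standalone C++ sketch below shows the same ceiling division; FrameIntervalMicroseconds() and its local kMicrosecondsPerSecond constant are hypothetical stand-ins for the base::Time / base::TimeDelta usage in the patch, not Chromium APIs.

#include <cstdint>
#include <iostream>

// Hypothetical helper mirroring the patch's arithmetic:
// interval_us = ceil(microseconds_per_second / frame_rate).
int64_t FrameIntervalMicroseconds(int frame_rate) {
  const int64_t kMicrosecondsPerSecond = 1000000;  // stand-in constant
  return (kMicrosecondsPerSecond + frame_rate - 1) / frame_rate;
}

int main() {
  // At 30 fps the interval is 33334 us (33333.3 rounded up).
  std::cout << FrameIntervalMicroseconds(30) << " us\n";
  return 0;
}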