// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.media;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.opengl.GLES20;
import android.util.Log;
import android.view.Surface;
import android.view.WindowManager;

import java.io.IOException;
import java.lang.RuntimeException;
import java.util.concurrent.locks.ReentrantLock;
import java.util.Iterator;
import java.util.List;

import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;

@JNINamespace("media")
public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
    static class CaptureCapability {
        public int mWidth = 0;
        public int mHeight = 0;
        public int mDesiredFps = 0;
    }

    private Camera mCamera;
    public ReentrantLock mPreviewBufferLock = new ReentrantLock();
    private int mPixelFormat = ImageFormat.YV12;
    private Context mContext = null;
    // True when native code has started capture.
    private boolean mIsRunning = false;

    private static final int NUM_CAPTURE_BUFFERS = 3;
    private int mExpectedFrameSize = 0;
    private int mId = 0;
    // Native callback context variable.
    private int mNativeVideoCaptureDeviceAndroid = 0;
    private int[] mGlTextures = null;
    private SurfaceTexture mSurfaceTexture = null;
    private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

    private int mCameraOrientation = 0;
    private int mCameraFacing = 0;
    private int mDeviceOrientation = 0;

    CaptureCapability mCurrentCapability = null;
    private static final String TAG = "VideoCapture";

    // Returns an instance of VideoCapture.
    @CalledByNative
    public static VideoCapture createVideoCapture(Context context, int id,
            int nativeVideoCaptureDeviceAndroid) {
        return new VideoCapture(context, id, nativeVideoCaptureDeviceAndroid);
    }

    public VideoCapture(Context context, int id,
            int nativeVideoCaptureDeviceAndroid) {
        mContext = context;
        mId = id;
        mNativeVideoCaptureDeviceAndroid = nativeVideoCaptureDeviceAndroid;
    }

    // Returns 0 on success, -1 otherwise.

Review comment, Ami GONE FROM CHROMIUM, 2013/01/30 19:46:23:
    Return boolean instead, true on success?
Reply, wjia (left Chromium), 2013/02/06 00:45:34:
    Done.
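For reference, a minimal hypothetical sketch of the convention change being asked for above: failure reported as false instead of -1, success as true. This is not the follow-up patchset, only an illustration of the pattern; the class and method names below are invented.

// Hypothetical illustration only: the 0/-1 status convention used by allocate() in
// this patchset versus the boolean convention suggested in the review comment.
class ReturnConventionSketch {
    // Convention in this patchset: 0 on success, -1 on failure.
    static int statusAsInt(boolean setupSucceeded) {
        return setupSucceeded ? 0 : -1;
    }

    // Suggested convention: true on success, false on failure.
    static boolean statusAsBoolean(boolean setupSucceeded) {
        return setupSucceeded;
    }

    public static void main(String[] args) {
        System.out.println(statusAsInt(false));     // prints -1
        System.out.println(statusAsBoolean(false)); // prints false
    }
}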
    @CalledByNative
    public int allocate(int width, int height, int frameRate) {
        Log.d(TAG, "allocate: requested width=" + width +
              ", height=" + height + ", frameRate=" + frameRate);
        try {
            mCamera = Camera.open(mId);
            Camera.CameraInfo camera_info = new Camera.CameraInfo();
            Camera.getCameraInfo(mId, camera_info);
            mCameraOrientation = camera_info.orientation;
            mCameraFacing = camera_info.facing;
            mDeviceOrientation = getDeviceOrientation();
            Log.d(TAG, "allocate: device orientation=" + mDeviceOrientation +
                  ", camera orientation=" + mCameraOrientation +
                  ", facing=" + mCameraFacing);

            Camera.Parameters parameters = mCamera.getParameters();

            // Calculate fps.
            List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
            int frameRateInMs = frameRate * 1000;
            boolean fpsIsSupported = false;
            int fpsMin = 0;
            int fpsMax = 0;
            Iterator itFpsRange = listFpsRange.iterator();
            while (itFpsRange.hasNext()) {
                int[] fpsRange = (int[]) itFpsRange.next();
                if (fpsRange[0] <= frameRateInMs &&
                    frameRateInMs <= fpsRange[1]) {
                    fpsIsSupported = true;
                    fpsMin = fpsRange[0];
                    fpsMax = fpsRange[1];
                    break;
                }
            }

            if (!fpsIsSupported) {
                Log.e(TAG, "allocate: fps " + frameRate + " is not supported");
                return -1;
            }

            mCurrentCapability = new CaptureCapability();
            mCurrentCapability.mDesiredFps = frameRate;

            // Calculate size.
            List<Camera.Size> listCameraSize =
                    parameters.getSupportedPreviewSizes();
            int minDiff = Integer.MAX_VALUE;
            int matchedWidth = width;
            int matchedHeight = height;
            Iterator itCameraSize = listCameraSize.iterator();
            while (itCameraSize.hasNext()) {
                Camera.Size size = (Camera.Size) itCameraSize.next();
                int diff = Math.abs(size.width - width) +
                           Math.abs(size.height - height);
                Log.d(TAG, "allocate: support resolution (" +
                      size.width + ", " + size.height + "), diff=" + diff);
                if (diff < minDiff) {
                    minDiff = diff;
                    matchedWidth = size.width;
                    matchedHeight = size.height;
                }
            }
            mCurrentCapability.mWidth = matchedWidth;
            mCurrentCapability.mHeight = matchedHeight;
            Log.d(TAG, "allocate: matched width=" + matchedWidth +
                  ", height=" + matchedHeight);

            parameters.setPreviewSize(matchedWidth, matchedHeight);
            parameters.setPreviewFormat(mPixelFormat);
            parameters.setPreviewFpsRange(fpsMin, fpsMax);
            mCamera.setParameters(parameters);

            // Set SurfaceTexture.
            mGlTextures = new int[1];
            // Generate one texture pointer and bind it as an external texture.
            GLES20.glGenTextures(1, mGlTextures, 0);
            GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
            // No mip-mapping with camera source.
            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            // Clamp to edge is the only option.
            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

            mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
            mSurfaceTexture.setOnFrameAvailableListener(null);

            mCamera.setPreviewTexture(mSurfaceTexture);

            int bufSize = matchedWidth * matchedHeight *
                          ImageFormat.getBitsPerPixel(mPixelFormat) / 8;
            for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
                byte[] buffer = new byte[bufSize];
                mCamera.addCallbackBuffer(buffer);
            }
            mExpectedFrameSize = bufSize;
        } catch (IOException ex) {
            Log.e(TAG, "allocate: IOException");
            return -1;
        } catch (RuntimeException ex) {
            Log.e(TAG, "allocate: RuntimeException");
            return -1;
        }

        return 0;
    }

    @CalledByNative
    public int queryWidth() {
        return mCurrentCapability.mWidth;
    }

    @CalledByNative
    public int queryHeight() {
        return mCurrentCapability.mHeight;
    }

    @CalledByNative
    public int queryFrameRate() {
        return mCurrentCapability.mDesiredFps;
    }

    @CalledByNative
    public int startCapture() {
        if (mCamera == null) {
            Log.e(TAG, "startCapture: camera is null");
            return -1;
        }

        mPreviewBufferLock.lock();
        try {
            if (mIsRunning) {
                return 0;
            }
            mIsRunning = true;
        } finally {
            mPreviewBufferLock.unlock();
        }
        mCamera.setPreviewCallbackWithBuffer(this);
        mCamera.startPreview();
        return 0;
    }

    @CalledByNative
    public int stopCapture() {
        if (mCamera == null) {
            Log.d(TAG, "stopCapture: camera is null");
            return 0;
        }

        mPreviewBufferLock.lock();
        try {
            if (!mIsRunning) {
                return 0;
            }
            mIsRunning = false;
        } finally {
            mPreviewBufferLock.unlock();
        }

        mCamera.stopPreview();
        mCamera.setPreviewCallbackWithBuffer(null);
        return 0;
    }

    @CalledByNative
    public void deallocate() {
        if (mCamera == null)
            return;

        stopCapture();
        try {
            mCamera.setPreviewTexture(null);
            mSurfaceTexture.setOnFrameAvailableListener(null);
            GLES20.glDeleteTextures(1, mGlTextures, 0);
            mCurrentCapability = null;
            mCamera.release();
            mCamera = null;
        } catch (Exception ex) {
            Log.e(TAG, "deallocate: failed to deallocate camera");
            return;
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        mPreviewBufferLock.lock();
        try {
            if (!mIsRunning) {
                return;
            }
            if (data.length == mExpectedFrameSize) {
                int rotation = getDeviceOrientation();
                if (rotation != mDeviceOrientation) {
                    mDeviceOrientation = rotation;
                    Log.d(TAG,
                          "onPreviewFrame: device orientation=" +
                          mDeviceOrientation + ", camera orientation=" +
                          mCameraOrientation);
                }
                boolean flipVertical = false;
                boolean flipHorizontal = false;
                if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    rotation = (mCameraOrientation + rotation) % 360;
                    rotation = (360 - rotation) % 360;
                    flipHorizontal = (rotation == 180 || rotation == 0);
                    flipVertical = !flipHorizontal;
                } else {
                    rotation = (mCameraOrientation - rotation + 360) % 360;
                }
                nativeOnFrameAvailable(mNativeVideoCaptureDeviceAndroid,
                        data, mExpectedFrameSize,
                        rotation, flipVertical, flipHorizontal);
            }
        } finally {
            mPreviewBufferLock.unlock();
            if (camera != null) {
                camera.addCallbackBuffer(data);
            }
        }
    }

    // TODO(wjia): investigate whether reading from texture could give better
    // performance and frame rate.
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) { }

    private native void nativeOnFrameAvailable(int nativeVideoCaptureDeviceAndroid,
            byte[] data, int length, int rotation,
            boolean flipVertical, boolean flipHorizontal);

    private int getDeviceOrientation() {
        int orientation = 0;
        if (mContext != null) {
            WindowManager wm = (WindowManager) mContext.getSystemService(
                    Context.WINDOW_SERVICE);
            switch (wm.getDefaultDisplay().getRotation()) {
                case Surface.ROTATION_90:
                    orientation = 90;
                    break;
                case Surface.ROTATION_180:
                    orientation = 180;
                    break;
                case Surface.ROTATION_270:
                    orientation = 270;
                    break;
                case Surface.ROTATION_0:
                default:
                    orientation = 0;
                    break;
            }
        }
        return orientation;
    }
}
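The rotation and mirroring arithmetic in onPreviewFrame() is compact and easy to misread. The standalone sketch below is not part of the CL; the class and method names are invented, but the two branches mirror the math above so they can be checked by hand.

// Standalone sketch of the rotation/flip computation in onPreviewFrame(), for
// illustration only. Front-facing cameras compensate for mirroring and pick a
// flip axis; back-facing cameras only need the orientation difference.
class RotationSketch {
    // Returns {rotationDegrees, flipVertical ? 1 : 0, flipHorizontal ? 1 : 0}.
    static int[] computeRotation(boolean frontFacing, int cameraOrientation,
                                 int deviceOrientation) {
        int rotation;
        boolean flipVertical = false;
        boolean flipHorizontal = false;
        if (frontFacing) {
            rotation = (cameraOrientation + deviceOrientation) % 360;
            rotation = (360 - rotation) % 360;
            flipHorizontal = (rotation == 180 || rotation == 0);
            flipVertical = !flipHorizontal;
        } else {
            rotation = (cameraOrientation - deviceOrientation + 360) % 360;
        }
        return new int[] { rotation, flipVertical ? 1 : 0, flipHorizontal ? 1 : 0 };
    }

    public static void main(String[] args) {
        // Back camera mounted at 90 degrees, device upright: frame needs a 90-degree turn.
        System.out.println(java.util.Arrays.toString(computeRotation(false, 90, 0)));  // [90, 0, 0]
        // Front camera mounted at 270 degrees, device upright: 90 degrees plus a vertical flip.
        System.out.println(java.util.Arrays.toString(computeRotation(true, 270, 0)));  // [90, 1, 0]
    }
}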