Chromium Code Reviews

Side by Side Diff: ios/chrome/browser/ui/qr_scanner/camera_controller.mm

Issue 2589803002: Upstream Chrome on iOS source code [6/11]. (Closed)
Patch Set: Created 4 years ago
1 // Copyright 2016 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #import "ios/chrome/browser/ui/qr_scanner/camera_controller.h"
6
7 #include "base/ios/weak_nsobject.h"
8 #include "base/logging.h"
9 #include "base/mac/foundation_util.h"
10 #include "base/mac/scoped_nsobject.h"
11 #include "base/strings/stringprintf.h"
12 #include "ios/chrome/common/ios_app_bundle_id_prefix.h"
13
14 @interface CameraController ()<AVCaptureMetadataOutputObjectsDelegate> {
15 // The capture session for recording video and detecting QR codes.
16 base::scoped_nsobject<AVCaptureSession> _captureSession;
17 // The metadata output attached to the capture session.
18 base::scoped_nsobject<AVCaptureMetadataOutput> _metadataOutput;
19 // The delegate which receives the scanned result. All methods of this
20 // delegate should be called on the main queue.
21 base::WeakNSProtocol<id<CameraControllerDelegate>> _delegate;
22 // The queue for dispatching calls to |_captureSession|.
23 dispatch_queue_t _sessionQueue;
24 }
25
26 // The current state of the camera. The state is set to CAMERA_NOT_LOADED before
27 // the camera is first loaded, and afterwards it is never CAMERA_NOT_LOADED.
28 @property(nonatomic, readwrite, assign) qr_scanner::CameraState cameraState;
29 // Redeclaration of |torchActive| to make the setter private.
30 @property(nonatomic, readwrite, assign, getter=isTorchActive) BOOL torchActive;
31 // The current availability of the torch.
32 @property(nonatomic, readwrite, assign, getter=isTorchAvailable)
33 BOOL torchAvailable;
34
35 // YES if |cameraState| is CAMERA_AVAILABLE.
36 - (BOOL)isCameraAvailable;
37 // Starts receiving notifications about changes to the capture session and to the
38 // torch properties.
39 - (void)startReceivingNotifications;
40 // Stops receiving all notifications.
41 - (void)stopReceivingNotifications;
42 // Returns the camera attached to |_captureSession|.
43 - (AVCaptureDevice*)getCamera;
44 // Returns the AVCaptureVideoOrientation to compensate for the current
45 // UIInterfaceOrientation. Defaults to AVCaptureVideoOrientationPortrait.
46 - (AVCaptureVideoOrientation)videoOrientationForCurrentInterfaceOrientation;
47
48 @end
49
50 @implementation CameraController {
51 qr_scanner::CameraState _cameraState;
52 BOOL _torchActive;
53 BOOL _torchAvailable;
54 CGRect _viewportRect;
55 }
56
57 #pragma mark lifecycle
58
59 - (instancetype)initWithDelegate:(id<CameraControllerDelegate>)delegate {
60 self = [super init];
61 if (self) {
62 DCHECK(delegate);
63 _cameraState = qr_scanner::CAMERA_NOT_LOADED;
64 _delegate.reset(delegate);
65 std::string queueName =
66 base::StringPrintf("%s.chrome.ios.QRScannerCaptureSessionQueue",
67 BUILDFLAG(IOS_APP_BUNDLE_ID_PREFIX));
68 _sessionQueue =
69 dispatch_queue_create(queueName.c_str(), DISPATCH_QUEUE_SERIAL);
70 _torchAvailable = NO;
71 _torchActive = NO;
72 _viewportRect = CGRectNull;
73 }
74 return self;
75 }
76
77 - (void)dealloc {
78 [self stopReceivingNotifications];
79 dispatch_release(_sessionQueue);
80 [super dealloc];
81 }
82
83 #pragma mark public methods
84
85 - (AVAuthorizationStatus)getAuthorizationStatus {
86 return [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
87 }
88
89 - (void)requestAuthorizationAndLoadCaptureSession:
90 (AVCaptureVideoPreviewLayer*)previewLayer {
91 DCHECK(previewLayer);
92 DCHECK([self getAuthorizationStatus] == AVAuthorizationStatusNotDetermined);
93 [AVCaptureDevice
94 requestAccessForMediaType:AVMediaTypeVideo
95 completionHandler:^void(BOOL granted) {
96 if (!granted) {
97 [self setCameraState:qr_scanner::CAMERA_PERMISSION_DENIED];
98 } else {
99 [self loadCaptureSession:previewLayer];
100 }
101 }];
102 }
103
104 - (void)setViewport:(CGRect)viewportRect {
105 dispatch_async(_sessionQueue, ^{
106 _viewportRect = viewportRect;
107 if (_metadataOutput) {
108 [_metadataOutput setRectOfInterest:_viewportRect];
109 }
110 });
111 }
112
113 - (void)resetVideoOrientation:(AVCaptureVideoPreviewLayer*)previewLayer {
114 DCHECK(previewLayer);
115 AVCaptureConnection* videoConnection = [previewLayer connection];
116 if ([videoConnection isVideoOrientationSupported]) {
117 [videoConnection setVideoOrientation:
118 [self videoOrientationForCurrentInterfaceOrientation]];
119 }
120 }
121
122 - (void)startRecording {
123 dispatch_async(_sessionQueue, ^{
124 if ([self isCameraAvailable]) {
125 if (![_captureSession isRunning]) {
126 [_captureSession startRunning];
127 }
128 }
129 });
130 }
131
132 - (void)stopRecording {
133 dispatch_async(_sessionQueue, ^{
134 if ([self isCameraAvailable]) {
135 if ([_captureSession isRunning]) {
136 [_captureSession stopRunning];
137 }
138 }
139 });
140 }
141
142 - (void)setTorchMode:(AVCaptureTorchMode)mode {
143 dispatch_async(_sessionQueue, ^{
144 if (![self isCameraAvailable]) {
145 return;
146 }
147 AVCaptureDevice* camera = [self getCamera];
148 if (![camera isTorchModeSupported:mode]) {
149 return;
150 }
151 NSError* error = nil;
152 [camera lockForConfiguration:&error];
153 if (error) {
154 return;
155 }
156 [camera setTorchMode:mode];
157 [camera unlockForConfiguration];
158 });
159 }
160
161 #pragma mark private methods
162
163 - (BOOL)isCameraAvailable {
164 return [self cameraState] == qr_scanner::CAMERA_AVAILABLE;
165 }
166
167 - (void)loadCaptureSession:(AVCaptureVideoPreviewLayer*)previewLayer {
168 DCHECK(previewLayer);
169 DCHECK([self cameraState] == qr_scanner::CAMERA_NOT_LOADED);
170 DCHECK([self getAuthorizationStatus] == AVAuthorizationStatusAuthorized);
171 dispatch_async(_sessionQueue, ^{
172 // Get the back camera.
173 NSArray* videoCaptureDevices =
174 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
175 if ([videoCaptureDevices count] == 0) {
176 [self setCameraState:qr_scanner::CAMERA_UNAVAILABLE];
177 return;
178 }
179
180 NSUInteger cameraIndex = [videoCaptureDevices
181 indexOfObjectPassingTest:^BOOL(AVCaptureDevice* device, NSUInteger idx,
182 BOOL* stop) {
183 return device.position == AVCaptureDevicePositionBack;
184 }];
185
186 // Allow only the back camera.
187 if (cameraIndex == NSNotFound) {
188 [self setCameraState:qr_scanner::CAMERA_UNAVAILABLE];
189 return;
190 }
191 AVCaptureDevice* camera = videoCaptureDevices[cameraIndex];
192
193 // Configure camera input.
194 NSError* error = nil;
195 AVCaptureDeviceInput* videoInput =
196 [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
197 if (error || !videoInput) {
198 [self setCameraState:qr_scanner::CAMERA_UNAVAILABLE];
199 return;
200 }
201
202 AVCaptureSession* session = [[[AVCaptureSession alloc] init] autorelease];
203 if (![session canAddInput:videoInput]) {
204 [self setCameraState:qr_scanner::CAMERA_UNAVAILABLE];
205 return;
206 }
207 [session addInput:videoInput];
208
209 // Configure metadata output.
210 AVCaptureMetadataOutput* metadataOutput =
211 [[[AVCaptureMetadataOutput alloc] init] autorelease];
212 [metadataOutput setMetadataObjectsDelegate:self
213 queue:dispatch_get_main_queue()];
214 if (![session canAddOutput:metadataOutput]) {
215 [self setCameraState:qr_scanner::CAMERA_UNAVAILABLE];
216 return;
217 }
218 [session addOutput:metadataOutput];
219 NSArray* availableCodeTypes = [metadataOutput availableMetadataObjectTypes];
220
221 // Require QR code recognition to be available.
222 if (![availableCodeTypes containsObject:AVMetadataObjectTypeQRCode]) {
223 [self setCameraState:qr_scanner::CAMERA_UNAVAILABLE];
224 return;
225 }
226 [metadataOutput setMetadataObjectTypes:availableCodeTypes];
227 _metadataOutput.reset([metadataOutput retain]);
228
229 _captureSession.reset([session retain]);
230 [self setCameraState:qr_scanner::CAMERA_AVAILABLE];
231 // Set up torchAvailable.
232 [self
233 setTorchAvailable:[camera hasTorch] &&
234 [camera isTorchModeSupported:AVCaptureTorchModeOn] &&
235 [camera isTorchModeSupported:AVCaptureTorchModeOff]];
236
237 [previewLayer setSession:_captureSession];
238 [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
239 [self resetVideoOrientation:previewLayer];
240 dispatch_async(dispatch_get_main_queue(), ^{
241 [_delegate captureSessionIsConnected];
242 [self startRecording];
243 });
244 [self startReceivingNotifications];
245 });
246 }
247
248 - (void)startReceivingNotifications {
249 // Start receiving notifications about changes to the capture session.
250 [[NSNotificationCenter defaultCenter]
251 addObserver:self
252 selector:@selector(handleAVCaptureSessionRuntimeError:)
253 name:AVCaptureSessionRuntimeErrorNotification
254 object:_captureSession];
255
256 [[NSNotificationCenter defaultCenter]
257 addObserver:self
258 selector:@selector(handleAVCaptureSessionWasInterrupted:)
259 name:AVCaptureSessionWasInterruptedNotification
260 object:_captureSession];
261
262 [[NSNotificationCenter defaultCenter]
263 addObserver:self
264 selector:@selector(handleAVCaptureSessionInterruptionEnded:)
265 name:AVCaptureSessionInterruptionEndedNotification
266 object:_captureSession];
267
268 // Start receiving notifications about changes to the camera.
269 AVCaptureDevice* camera = [self getCamera];
270 DCHECK(camera);
271
272 [[NSNotificationCenter defaultCenter]
273 addObserver:self
274 selector:@selector(handleAVCaptureDeviceWasDisconnected:)
275 name:AVCaptureDeviceWasDisconnectedNotification
276 object:camera];
277
278 // Start receiving notifications about changes to the torch state.
279 [camera addObserver:self
280 forKeyPath:@"hasTorch"
281 options:NSKeyValueObservingOptionNew
282 context:nil];
283
284 [camera addObserver:self
285 forKeyPath:@"torchAvailable"
286 options:NSKeyValueObservingOptionNew
287 context:nil];
288
289 [camera addObserver:self
290 forKeyPath:@"torchActive"
291 options:NSKeyValueObservingOptionNew
292 context:nil];
293 }
294
295 - (void)stopReceivingNotifications {
296 [[NSNotificationCenter defaultCenter] removeObserver:self];
297 AVCaptureDevice* camera = [self getCamera];
298 [camera removeObserver:self forKeyPath:@"hasTorch"];
299 [camera removeObserver:self forKeyPath:@"torchAvailable"];
300 [camera removeObserver:self forKeyPath:@"torchActive"];
301 }
302
303 - (AVCaptureDevice*)getCamera {
304 AVCaptureDeviceInput* captureSessionInput =
305 [[_captureSession inputs] firstObject];
306 DCHECK(captureSessionInput != nil);
307 return [captureSessionInput device];
308 }
309
310 - (AVCaptureVideoOrientation)videoOrientationForCurrentInterfaceOrientation {
311 UIInterfaceOrientation orientation =
312 [[UIApplication sharedApplication] statusBarOrientation];
313 switch (orientation) {
314 case UIInterfaceOrientationUnknown:
315 return AVCaptureVideoOrientationPortrait;
316 default:
317 return static_cast<AVCaptureVideoOrientation>(orientation);
318 }
319 }
320
321 #pragma mark notification handlers
322
323 - (void)handleAVCaptureSessionRuntimeError:(NSNotification*)notification {
324 dispatch_async(_sessionQueue, ^{
325 [self setCameraState:qr_scanner::CAMERA_UNAVAILABLE];
326 });
327 }
328
329 - (void)handleAVCaptureSessionWasInterrupted:(NSNotification*)notification {
330 dispatch_async(_sessionQueue, ^{
331 AVCaptureSessionInterruptionReason reason =
332 (AVCaptureSessionInterruptionReason)[[[notification userInfo]
333 valueForKey:AVCaptureSessionInterruptionReasonKey] integerValue];
334 switch (reason) {
335 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
336 // iOS automatically stops and restarts capture sessions when the app
337 // is backgrounded and foregrounded.
338 break;
339 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
340 [self setCameraState:qr_scanner::CAMERA_IN_USE_BY_ANOTHER_APPLICATION];
341 break;
342 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
343 [self setCameraState:qr_scanner::MULTIPLE_FOREGROUND_APPS];
344 break;
345 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
346 NOTREACHED();
347 break;
348 }
349 });
350 }
351
352 - (void)handleAVCaptureSessionInterruptionEnded:(NSNotification*)notification {
353 dispatch_async(_sessionQueue, ^{
354 if ([_captureSession isRunning]) {
355 [self setCameraState:qr_scanner::CAMERA_AVAILABLE];
356 }
357 });
358 }
359
360 - (void)handleAVCaptureDeviceWasDisconnected:(NSNotification*)notification {
361 dispatch_async(_sessionQueue, ^{
362 [self setCameraState:qr_scanner::CAMERA_UNAVAILABLE];
363 });
364 }
365
366 - (void)observeValueForKeyPath:(NSString*)keyPath
367 ofObject:(id)object
368 change:(NSDictionary<NSString*, id>*)change
369 context:(void*)context {
370 if ([keyPath isEqualToString:@"hasTorch"] ||
371 [keyPath isEqualToString:@"torchAvailable"] ||
372 [keyPath isEqualToString:@"torchActive"]) {
373 AVCaptureDevice* camera = [self getCamera];
374 [self setTorchAvailable:([camera hasTorch] && [camera isTorchAvailable])];
375 [self setTorchActive:[camera isTorchActive]];
376 }
377 }
378
379 #pragma mark property implementation
380
381 - (qr_scanner::CameraState)cameraState {
382 return _cameraState;
383 }
384
385 - (void)setCameraState:(qr_scanner::CameraState)state {
386 if (state == _cameraState) {
387 return;
388 }
389 _cameraState = state;
390 dispatch_async(dispatch_get_main_queue(), ^{
391 [_delegate cameraStateChanged:state];
392 });
393 }
394
395 - (BOOL)isTorchAvailable {
396 return _torchAvailable;
397 }
398
399 - (void)setTorchAvailable:(BOOL)available {
400 if (available == _torchAvailable) {
401 return;
402 }
403 _torchAvailable = available;
404 dispatch_async(dispatch_get_main_queue(), ^{
405 [_delegate torchAvailabilityChanged:available];
406 });
407 }
408
409 - (BOOL)isTorchActive {
410 return _torchActive;
411 }
412
413 - (void)setTorchActive:(BOOL)active {
414 if (active == _torchActive) {
415 return;
416 }
417 _torchActive = active;
418 dispatch_async(dispatch_get_main_queue(), ^{
419 [_delegate torchStateChanged:active];
420 });
421 }
422
423 #pragma mark AVCaptureMetadataOutputObjectsDelegate
424
425 - (void)captureOutput:(AVCaptureOutput*)captureOutput
426 didOutputMetadataObjects:(NSArray*)metadataObjects
427 fromConnection:(AVCaptureConnection*)connection {
428 AVMetadataObject* metadataResult = [metadataObjects firstObject];
429 if (![metadataResult
430 isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
431 return;
432 }
433 NSString* resultString =
434 [base::mac::ObjCCastStrict<AVMetadataMachineReadableCodeObject>(
435 metadataResult) stringValue];
436 if (resultString.length == 0) {
437 return;
438 }
439
440 dispatch_async(_sessionQueue, ^{
441 if ([_captureSession isRunning]) {
442 [_captureSession stopRunning];
443 }
444 });
445
446 // Check if the barcode can only contain digits. In this case, the result can
447 // be loaded immediately.
448 NSString* resultType = metadataResult.type;
449 BOOL isAllDigits =
450 [resultType isEqualToString:AVMetadataObjectTypeUPCECode] ||
451 [resultType isEqualToString:AVMetadataObjectTypeEAN8Code] ||
452 [resultType isEqualToString:AVMetadataObjectTypeEAN13Code] ||
453 [resultType isEqualToString:AVMetadataObjectTypeInterleaved2of5Code] ||
454 [resultType isEqualToString:AVMetadataObjectTypeITF14Code];
455
456 // Note: |captureOutput| is called on the main queue. This is specified by
457 // |setMetadataObjectsDelegate:queue:|.
458 [_delegate receiveQRScannerResult:resultString loadImmediately:isAllDigits];
459 }
460
461 @end
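
For context, here is a minimal usage sketch (not part of this patch): the view controller class below is hypothetical, the CameraController methods are the ones implemented above, and the CameraControllerDelegate signatures are inferred from the delegate calls in this file. All controller calls are made on the main queue; the controller dispatches its own work onto |_sessionQueue| internally.

// Hypothetical consumer of CameraController, for illustration only.
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

#include "base/mac/scoped_nsobject.h"
#import "ios/chrome/browser/ui/qr_scanner/camera_controller.h"

@interface ExampleScannerViewController
    : UIViewController<CameraControllerDelegate>
@end

@implementation ExampleScannerViewController {
  base::scoped_nsobject<CameraController> _cameraController;
  base::scoped_nsobject<AVCaptureVideoPreviewLayer> _previewLayer;
}

- (void)viewDidLoad {
  [super viewDidLoad];
  _cameraController.reset([[CameraController alloc] initWithDelegate:self]);
  _previewLayer.reset([[AVCaptureVideoPreviewLayer alloc] init]);
  [_previewLayer setFrame:[[self view] bounds]];
  [[[self view] layer] addSublayer:_previewLayer];
  // The controller DCHECKs that authorization has not been determined yet.
  if ([_cameraController getAuthorizationStatus] ==
      AVAuthorizationStatusNotDetermined) {
    [_cameraController requestAuthorizationAndLoadCaptureSession:_previewLayer];
  }
}

#pragma mark CameraControllerDelegate

- (void)captureSessionIsConnected {
  // Restrict detection to a region of interest; (0, 0, 1, 1) is the full
  // frame in AVFoundation's normalized coordinates.
  [_cameraController setViewport:CGRectMake(0, 0, 1, 1)];
}

- (void)cameraStateChanged:(qr_scanner::CameraState)state {
  // Update the UI, e.g. show an error for CAMERA_PERMISSION_DENIED.
}

- (void)torchAvailabilityChanged:(BOOL)torchIsAvailable {
  // Enable or disable a torch button.
}

- (void)torchStateChanged:(BOOL)torchIsOn {
  // Update the torch button icon.
}

- (void)receiveQRScannerResult:(NSString*)result loadImmediately:(BOOL)load {
  // Called on the main queue with the decoded string.
}

@end

The recording is started by the controller itself once the capture session is connected, so the consumer only needs to react to delegate callbacks and, optionally, call startRecording/stopRecording when the scanner UI is shown or dismissed.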