Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(140)

Side by Side Diff: media/capture/video/mac/video_capture_device_avfoundation_mac.mm

Issue 2143903003: [WIP] Move media/capture to device/capture (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
#import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"

#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#include <stddef.h>
#include <stdint.h>

#include "base/location.h"
#include "base/logging.h"
#include "base/mac/foundation_util.h"
#include "base/mac/mac_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/string_util.h"
#include "media/base/timestamp_constants.h"
#include "media/base/video_capture_types.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
#include "ui/gfx/geometry/size.h"
21
// Heuristic thresholds for pixel-format selection in
// -setCaptureHeight:width:frameRate:. MJPEG is preferred when the requested
// frame width or height exceeds these values (i.e. anything larger than VGA).
static const int kMjpegWidthThreshold = 640;
static const int kMjpegHeightThreshold = 480;
25
26 namespace {
27
// UMA buckets for MacBook model generations, reported by MaybeWriteUma() when
// no camera is detected (investigation of crbug/582931).
// These values are persisted to histograms: do NOT renumber or reorder
// existing entries (the ordering below is historical, not chronological);
// append new values before MAX_MACBOOK_VERSION and keep MAX_MACBOOK_VERSION
// aliasing the last real entry.
enum MacBookVersions {
  OTHER = 0,
  MACBOOK_5,  // MacBook5.X
  MACBOOK_6,
  MACBOOK_7,
  MACBOOK_8,
  MACBOOK_PRO_11,  // MacBookPro11.X
  MACBOOK_PRO_12,
  MACBOOK_PRO_13,
  MACBOOK_AIR_5,  // MacBookAir5.X
  MACBOOK_AIR_6,
  MACBOOK_AIR_7,
  MACBOOK_AIR_8,
  MACBOOK_AIR_3,
  MACBOOK_AIR_4,
  MACBOOK_4,
  MACBOOK_9,
  MACBOOK_10,
  MACBOOK_PRO_10,
  MACBOOK_PRO_9,
  MACBOOK_PRO_8,
  MACBOOK_PRO_7,
  MACBOOK_PRO_6,
  MACBOOK_PRO_5,
  MAX_MACBOOK_VERSION = MACBOOK_PRO_5
};
54
// Maps a hardware model identifier string (e.g. "MacBookPro11,2") to its
// MacBookVersions UMA bucket. Returns OTHER for any model not listed below.
MacBookVersions GetMacBookModel(const std::string& model) {
  // Table of known model-identifier prefixes (including the trailing comma,
  // so "MacBook1," cannot match the "MacBook10," entry) and their buckets.
  struct ModelMapping {
    const char* prefix;
    MacBookVersions version;
  };
  static const ModelMapping kVersionForModelPrefix[] = {
      {"MacBook4,", MACBOOK_4},          {"MacBook5,", MACBOOK_5},
      {"MacBook6,", MACBOOK_6},          {"MacBook7,", MACBOOK_7},
      {"MacBook8,", MACBOOK_8},          {"MacBook9,", MACBOOK_9},
      {"MacBook10,", MACBOOK_10},        {"MacBookPro5,", MACBOOK_PRO_5},
      {"MacBookPro6,", MACBOOK_PRO_6},   {"MacBookPro7,", MACBOOK_PRO_7},
      {"MacBookPro8,", MACBOOK_PRO_8},   {"MacBookPro9,", MACBOOK_PRO_9},
      {"MacBookPro10,", MACBOOK_PRO_10}, {"MacBookPro11,", MACBOOK_PRO_11},
      {"MacBookPro12,", MACBOOK_PRO_12}, {"MacBookPro13,", MACBOOK_PRO_13},
      {"MacBookAir3,", MACBOOK_AIR_3},   {"MacBookAir4,", MACBOOK_AIR_4},
      {"MacBookAir5,", MACBOOK_AIR_5},   {"MacBookAir6,", MACBOOK_AIR_6},
      {"MacBookAir7,", MACBOOK_AIR_7},   {"MacBookAir8,", MACBOOK_AIR_8},
  };

  for (const auto& mapping : kVersionForModelPrefix) {
    const bool matches = base::StartsWith(
        model, mapping.prefix, base::CompareCase::INSENSITIVE_ASCII);
    if (matches)
      return mapping.version;
  }
  return OTHER;
}
81
// Adds UMA stats for the number of detected devices on MacBooks. These are
// used for investigating crbug/582931. Only reports when the hardware model
// identifier starts with "MacBook"; when no device at all (active or
// suspended) is found, additionally records which MacBook generation the
// machine is.
void MaybeWriteUma(int number_of_devices, int number_of_suspended_devices) {
  std::string model = base::mac::GetModelIdentifier();
  if (base::StartsWith(model, "MacBook",
                       base::CompareCase::INSENSITIVE_ASCII)) {
    UMA_HISTOGRAM_COUNTS("Media.VideoCapture.MacBook.NumberOfDevices",
                         number_of_devices + number_of_suspended_devices);
    if (number_of_devices + number_of_suspended_devices == 0) {
      UMA_HISTOGRAM_ENUMERATION(
          "Media.VideoCapture.MacBook.HardwareVersionWhenNoCamera",
          GetMacBookModel(model), MAX_MACBOOK_VERSION + 1);
    }
  }
}
97
// Translates a macOS CoreVideo/CoreMedia FourCC pixel format code into the
// equivalent Chromium media::VideoPixelFormat. Any code without an explicit
// mapping yields PIXEL_FORMAT_UNKNOWN.
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
  if (code == kCVPixelFormatType_422YpCbCr8)
    return media::PIXEL_FORMAT_UYVY;
  if (code == CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs)
    return media::PIXEL_FORMAT_YUY2;
  if (code == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML)
    return media::PIXEL_FORMAT_MJPEG;
  return media::PIXEL_FORMAT_UNKNOWN;
}
112
// Extracts |base_address| and |length| out of a SampleBuffer. The sample
// buffer's data must live in a single contiguous memory block (DCHECKed
// below); the returned pointer references memory owned by |sample_buffer|.
void ExtractBaseAddressAndLength(
    char** base_address,
    size_t* length,
    CoreMediaGlue::CMSampleBufferRef sample_buffer) {
  CoreMediaGlue::CMBlockBufferRef block_buffer =
      CoreMediaGlue::CMSampleBufferGetDataBuffer(sample_buffer);
  DCHECK(block_buffer);

  size_t length_at_offset;
  const OSStatus status = CoreMediaGlue::CMBlockBufferGetDataPointer(
      block_buffer, 0, &length_at_offset, length, base_address);
  DCHECK_EQ(noErr, status);
  // Expect the (M)JPEG data to be available as a contiguous reference, i.e.
  // not covered by multiple memory blocks.
  DCHECK_EQ(length_at_offset, *length);
}
130
131 } // anonymous namespace
132
133 @implementation VideoCaptureDeviceAVFoundation
134
135 #pragma mark Class methods
136
// Populates |deviceNames| with one DeviceNameAndTransportType entry per
// non-suspended video (or muxed audio+video) capture device, keyed by the
// device's unique ID. Suspended devices are excluded from the result but
// counted for the UMA stats reported via MaybeWriteUma().
+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // At this stage we already know that AVFoundation is supported and the whole
  // library is loaded and initialised, by the device monitoring.
  NSArray* devices = [AVCaptureDeviceGlue devices];
  int number_of_suspended_devices = 0;
  for (CrAVCaptureDevice* device in devices) {
    if ([device hasMediaType:AVFoundationGlue::AVMediaTypeVideo()] ||
        [device hasMediaType:AVFoundationGlue::AVMediaTypeMuxed()]) {
      if ([device isSuspended]) {
        ++number_of_suspended_devices;
        continue;
      }
      DeviceNameAndTransportType* nameAndTransportType =
          [[[DeviceNameAndTransportType alloc]
               initWithName:[device localizedName]
              transportType:[device transportType]] autorelease];
      [deviceNames setObject:nameAndTransportType forKey:[device uniqueID]];
    }
  }
  MaybeWriteUma([deviceNames count], number_of_suspended_devices);
}
158
// Returns an autoreleased dictionary of capture devices, built by
// +getDeviceNames: and handed back as an (effectively) immutable dictionary.
+ (NSDictionary*)deviceNames {
  NSMutableDictionary* result =
      [[[NSMutableDictionary alloc] init] autorelease];
  // The device name retrieval is not going to happen in the main thread, and
  // this might cause instabilities (it did in QTKit), so keep an eye here.
  [self getDeviceNames:result];
  return result;
}
167
// Appends to |formats| every capture format advertised by the device whose
// unique ID matches |name|: one media::VideoCaptureFormat per (native format,
// supported frame-rate range) pair, using the range's maximum frame rate.
// Does nothing if no such device exists.
+ (void)getDevice:(const media::VideoCaptureDevice::Name&)name
    supportedFormats:(media::VideoCaptureFormats*)formats {
  NSArray* devices = [AVCaptureDeviceGlue devices];
  CrAVCaptureDevice* device = nil;
  for (device in devices) {
    if ([[device uniqueID] UTF8String] == name.id())
      break;
  }
  // Fast enumeration leaves |device| nil when the loop runs to exhaustion
  // without |break|-ing, so this catches both "no devices" and "no match".
  if (device == nil)
    return;
  for (CrAVCaptureDeviceFormat* format in device.formats) {
    // MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType
    // as well according to CMFormatDescription.h
    const media::VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat(
        CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
            [format formatDescription]));

    CoreMediaGlue::CMVideoDimensions dimensions =
        CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(
            [format formatDescription]);

    for (CrAVFrameRateRange* frameRate in
         [format videoSupportedFrameRateRanges]) {
      // |captureFormat| (formerly |format|) is renamed to avoid shadowing the
      // enumerated CrAVCaptureDeviceFormat* |format| above.
      const media::VideoCaptureFormat captureFormat(
          gfx::Size(dimensions.width, dimensions.height),
          frameRate.maxFrameRate, pixelFormat);
      formats->push_back(captureFormat);
      DVLOG(2) << name.name() << " "
               << media::VideoCaptureFormat::ToString(captureFormat);
    }
  }
}
200
201 #pragma mark Public methods
202
// Designated initializer. |frameReceiver| must be non-null; it receives
// frames, photos and errors (it can later be swapped or cleared with
// -setFrameReceiver:). Also creates the underlying AVCaptureSession. Must be
// called on the main thread.
- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  if ((self = [super init])) {
    DCHECK(main_thread_checker_.CalledOnValidThread());
    DCHECK(frameReceiver);
    [self setFrameReceiver:frameReceiver];
    captureSession_.reset(
        [[AVFoundationGlue::AVCaptureSessionClass() alloc] init]);
  }
  return self;
}
213
// Non-ARC dealloc: stops any running capture (which also unregisters the
// notification observers) before releasing.
- (void)dealloc {
  [self stopCapture];
  [super dealloc];
}
218
// Sets (or clears, with nullptr) the frame receiver. Guarded by |lock_| so it
// can race safely with frame delivery on the capture dispatch queue.
- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  base::AutoLock lock(lock_);
  frameReceiver_ = frameReceiver;
}
223
// Points the capture session at the device with unique ID |deviceId|,
// creating the device input, the video data output and the still image
// output. Passing nil instead tears the session down (stops capturing and
// removes the input/outputs). Returns YES on success; on failure an error is
// forwarded through -sendErrorString:. Main thread only.
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  DCHECK(captureSession_);
  DCHECK(main_thread_checker_.CalledOnValidThread());

  if (!deviceId) {
    // First stop the capture session, if it's running.
    [self stopCapture];
    // Now remove the input and output from the capture session.
    [captureSession_ removeOutput:captureVideoDataOutput_];
    if (captureDeviceInput_) {
      [captureSession_ removeInput:captureDeviceInput_];
      // No need to release |captureDeviceInput_|, is owned by the session.
      captureDeviceInput_ = nil;
    }
    if (stillImageOutput_)
      [captureSession_ removeOutput:stillImageOutput_];
    return YES;
  }

  // Look for input device with requested name.
  captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];
  if (!captureDevice_) {
    [self
        sendErrorString:[NSString stringWithUTF8String:
                                      "Could not open video capture device."]];
    return NO;
  }

  // Create the capture input associated with the device. Easy peasy.
  NSError* error = nil;
  captureDeviceInput_ =
      [AVCaptureDeviceInputGlue deviceInputWithDevice:captureDevice_
                                                error:&error];
  if (!captureDeviceInput_) {
    captureDevice_ = nil;
    [self sendErrorString:
              [NSString stringWithFormat:
                            @"Could not create video capture input (%@): %@",
                            [error localizedDescription],
                            [error localizedFailureReason]]];
    return NO;
  }
  [captureSession_ addInput:captureDeviceInput_];

  // Create a new data output for video. The data output is configured to
  // discard late frames by default.
  captureVideoDataOutput_.reset(
      [[AVFoundationGlue::AVCaptureVideoDataOutputClass() alloc] init]);
  if (!captureVideoDataOutput_) {
    [captureSession_ removeInput:captureDeviceInput_];
    [self sendErrorString:[NSString stringWithUTF8String:
                                        "Could not create video data output."]];
    return NO;
  }
  [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true];
  // Frames are delivered to -captureOutput:didOutputSampleBuffer:... on a
  // global dispatch queue, i.e. off the main thread.
  [captureVideoDataOutput_
      setSampleBufferDelegate:self
                        queue:dispatch_get_global_queue(
                                  DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
  [captureSession_ addOutput:captureVideoDataOutput_];

  // Create and plug the still image capture output. This should happen in
  // advance of the actual picture to allow for the 3A to stabilize.
  stillImageOutput_.reset(
      [[AVFoundationGlue::AVCaptureStillImageOutputClass() alloc] init]);
  if ([captureSession_ canAddOutput:stillImageOutput_])
    [captureSession_ addOutput:stillImageOutput_];

  return YES;
}
294
// Configures the video data output for the requested resolution and frame
// rate. Picks the pixel format: MJPEG for above-VGA resolutions when the
// device offers it, otherwise the best format per Chromium's preference
// order. Must be called with the session stopped, on the main thread.
// Always returns YES.
- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  DCHECK(![captureSession_ isRunning] &&
         main_thread_checker_.CalledOnValidThread());

  frameWidth_ = width;
  frameHeight_ = height;
  frameRate_ = frameRate;

  FourCharCode best_fourcc = kCVPixelFormatType_422YpCbCr8;
  const bool prefer_mjpeg =
      width > kMjpegWidthThreshold || height > kMjpegHeightThreshold;
  for (CrAVCaptureDeviceFormat* format in captureDevice_.formats) {
    const FourCharCode fourcc =
        CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
            [format formatDescription]);
    if (prefer_mjpeg &&
        fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
      best_fourcc = fourcc;
      break;
    }

    // Compare according to Chromium preference.
    if (media::VideoCaptureFormat::ComparePixelFormatPreference(
            FourCCToChromiumPixelFormat(fourcc),
            FourCCToChromiumPixelFormat(best_fourcc))) {
      best_fourcc = fourcc;
    }
  }

  // The capture output has to be configured, despite Mac documentation
  // detailing that setting the sessionPreset would be enough. The reason for
  // this mismatch is probably because most of the AVFoundation docs are written
  // for iOS and not for MacOsX. AVVideoScalingModeKey() refers to letterboxing
  // yes/no and preserve aspect ratio yes/no when scaling. Currently we set
  // cropping and preservation.
  NSDictionary* videoSettingsDictionary = @{
    (id) kCVPixelBufferWidthKey : @(width), (id)
    kCVPixelBufferHeightKey : @(height), (id)
    kCVPixelBufferPixelFormatTypeKey : @(best_fourcc),
    AVFoundationGlue::AVVideoScalingModeKey() :
        AVFoundationGlue::AVVideoScalingModeResizeAspectFill()
  };
  [captureVideoDataOutput_ setVideoSettings:videoSettingsDictionary];

  CrAVCaptureConnection* captureConnection = [captureVideoDataOutput_
      connectionWithMediaType:AVFoundationGlue::AVMediaTypeVideo()];
  // CMTimeMake accepts integer arguments but |frameRate| is float; express the
  // frame duration as kFrameRatePrecision-ths of a second and round the rate.
  // Computed once since the same duration is used for both the min and max
  // frame duration below.
  const CoreMediaGlue::CMTime frameDuration = CoreMediaGlue::CMTimeMake(
      media::kFrameRatePrecision,
      static_cast<int>(frameRate * media::kFrameRatePrecision));
  // Check selector existence, related to bugs http://crbug.com/327532 and
  // http://crbug.com/328096.
  if ([captureConnection
          respondsToSelector:@selector(isVideoMinFrameDurationSupported)] &&
      [captureConnection isVideoMinFrameDurationSupported]) {
    [captureConnection setVideoMinFrameDuration:frameDuration];
  }
  if ([captureConnection
          respondsToSelector:@selector(isVideoMaxFrameDurationSupported)] &&
      [captureConnection isVideoMaxFrameDurationSupported]) {
    [captureConnection setVideoMaxFrameDuration:frameDuration];
  }
  return YES;
}
366
// Starts the capture session. Returns NO if the session was never created
// (i.e. init failed). Registers for session runtime-error notifications,
// which are delivered to -onVideoError:. Main thread only.
- (BOOL)startCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if (!captureSession_) {
    DLOG(ERROR) << "Video capture session not initialized.";
    return NO;
  }
  // Connect the notifications.
  NSNotificationCenter* nc = [NSNotificationCenter defaultCenter];
  [nc addObserver:self
         selector:@selector(onVideoError:)
             name:AVFoundationGlue::AVCaptureSessionRuntimeErrorNotification()
           object:captureSession_];
  [captureSession_ startRunning];
  return YES;
}
382
// Stops the capture session if it is running and unregisters this object from
// all notifications. Safe to call when not capturing. Main thread only.
- (void)stopCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if ([captureSession_ isRunning])
    [captureSession_ stopRunning];  // Synchronous.
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}
389
// Asynchronously takes a still image through |stillImageOutput_|. The JPEG
// bytes (or an error) are delivered to |frameReceiver_| from the completion
// handler, which AVFoundation runs on a thread of its choosing; hence every
// access to |frameReceiver_| is guarded by |lock_|. Requires a running
// capture session. Main thread only.
- (void)takePhoto {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  DCHECK([captureSession_ isRunning]);

  DCHECK_EQ(1u, [[stillImageOutput_ connections] count]);
  CrAVCaptureConnection* const connection =
      [[stillImageOutput_ connections] firstObject];
  if (!connection) {
    base::AutoLock lock(lock_);
    // Guard against a detached receiver, as every other delivery path does.
    if (frameReceiver_)
      frameReceiver_->OnPhotoError();
    return;
  }

  const auto handler = ^(CoreMediaGlue::CMSampleBufferRef sampleBuffer,
                         NSError* error) {
    base::AutoLock lock(lock_);
    // The receiver may have been cleared while the capture was in flight.
    if (!frameReceiver_)
      return;
    if (error != nil) {
      frameReceiver_->OnPhotoError();
      return;
    }

    // Recommended compressed pixel format is JPEG, we don't expect surprises.
    // TODO(mcasas): Consider using [1] for merging EXIF output information:
    // [1] +(NSData*)jpegStillImageNSDataRepresentation:jpegSampleBuffer;
    DCHECK_EQ(
        CoreMediaGlue::kCMVideoCodecType_JPEG,
        CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
            CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer)));

    char* baseAddress = 0;
    size_t length = 0;
    ExtractBaseAddressAndLength(&baseAddress, &length, sampleBuffer);
    frameReceiver_->OnPhotoTaken(reinterpret_cast<uint8_t*>(baseAddress),
                                 length, "image/jpeg");
  };

  [stillImageOutput_ captureStillImageAsynchronouslyFromConnection:connection
                                                 completionHandler:handler];
}
431
432 #pragma mark Private methods
433
// |captureOutput| is called by the capture device to deliver a new frame.
// AVFoundation calls from a number of threads, depending on, at least, if
// Chrome is on foreground or background. For MJPEG frames the compressed
// bytes are read straight out of the sample buffer's block buffer; for raw
// frames the pixel buffer is locked (read-only) for the duration of the
// delivery and unlocked at the end.
- (void)captureOutput:(CrAVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
           fromConnection:(CrAVCaptureConnection*)connection {
  const CoreMediaGlue::CMFormatDescriptionRef formatDescription =
      CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer);
  const FourCharCode fourcc =
      CoreMediaGlue::CMFormatDescriptionGetMediaSubType(formatDescription);
  const CoreMediaGlue::CMVideoDimensions dimensions =
      CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(formatDescription);
  const media::VideoCaptureFormat captureFormat(
      gfx::Size(dimensions.width, dimensions.height), frameRate_,
      FourCCToChromiumPixelFormat(fourcc));

  char* baseAddress = 0;
  size_t frameSize = 0;
  CVImageBufferRef videoFrame = nil;
  if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
    ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer);
  } else {
    videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the frame and calculate frame size.
    if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) ==
        kCVReturnSuccess) {
      baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame));
      frameSize = CVPixelBufferGetHeight(videoFrame) *
                  CVPixelBufferGetBytesPerRow(videoFrame);
    } else {
      // Lock failed: leave |baseAddress| null (no delivery below) and clear
      // |videoFrame| so no unlock is attempted at the end.
      videoFrame = nil;
    }
  }

  {
    base::AutoLock lock(lock_);
    // Convert the buffer's presentation timestamp to a TimeDelta, falling
    // back to kNoTimestamp() when the buffer carries no valid time.
    const CoreMediaGlue::CMTime cm_timestamp =
        CoreMediaGlue::CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    const base::TimeDelta timestamp =
        CMTIME_IS_VALID(cm_timestamp)
            ? base::TimeDelta::FromMicroseconds(
                  cm_timestamp.value * base::TimeTicks::kMicrosecondsPerSecond /
                  cm_timestamp.timescale)
            : media::kNoTimestamp();

    if (frameReceiver_ && baseAddress) {
      frameReceiver_->ReceiveFrame(reinterpret_cast<uint8_t*>(baseAddress),
                                   frameSize, captureFormat, 0, 0, timestamp);
    }
  }

  if (videoFrame)
    CVPixelBufferUnlockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly);
}
488
// Handler for AVCaptureSessionRuntimeError notifications (registered in
// -startCapture). Extracts the NSError from the notification's userInfo and
// forwards a human-readable description to the frame receiver.
- (void)onVideoError:(NSNotification*)errorNotification {
  NSDictionary* userInfo = [errorNotification userInfo];
  NSError* error = base::mac::ObjCCast<NSError>(
      [userInfo objectForKey:AVFoundationGlue::AVCaptureSessionErrorKey()]);
  NSString* errorMessage =
      [NSString stringWithFormat:@"%@: %@", [error localizedDescription],
                                 [error localizedFailureReason]];
  [self sendErrorString:errorMessage];
}
497
// Logs |error| locally, then forwards it to the frame receiver (if one is
// still attached) under the lock that guards |frameReceiver_|.
- (void)sendErrorString:(NSString*)error {
  const char* utf8Error = [error UTF8String];
  DLOG(ERROR) << utf8Error;
  base::AutoLock lock(lock_);
  if (frameReceiver_)
    frameReceiver_->ReceiveError(FROM_HERE, utf8Error);
}
504
505 @end
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698