Chromium Code Reviews

Side by Side Diff: media/capture/video/mac/video_capture_device_avfoundation_mac.mm

Issue 2155723002: RELAND 3: ImageCapture: Implement takePhoto() for Mac AVFoundation (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Avoid using AVCaptureStillImageOutput if the capture is configured MJPEG (errors). Cleaned up black… Created 4 years, 5 months ago
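The patch wires an AVCaptureStillImageOutput into the already existing AVFoundation capture session so that ImageCapture's takePhoto() can deliver a JPEG still, and drops that output again when the session is configured for MJPEG (where it errors out). As orientation before the diff, here is a minimal sketch of the underlying still-capture flow using the raw AVFoundation API; the patch itself goes through AVFoundationGlue/CoreMediaGlue and the Cr* wrapper types so the framework can be soft-linked, and keeps the output in a scoped_nsobject member. |session| stands for an assumed, already configured AVCaptureSession with a video input.

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

// Hedged sketch, raw AVFoundation only (no Glue wrappers, ownership elided).
static void SketchTakeStillImage(AVCaptureSession* session) {
  AVCaptureStillImageOutput* stillOutput =
      [[AVCaptureStillImageOutput alloc] init];
  if (![session canAddOutput:stillOutput])
    return;
  [session addOutput:stillOutput];

  // Capture from the output's single video connection. With the default
  // outputSettings the delivered sample buffer holds JPEG-compressed data,
  // which is also what the patch's takePhoto DCHECKs for.
  AVCaptureConnection* connection = [[stillOutput connections] firstObject];
  void (^handler)(CMSampleBufferRef, NSError*) =
      ^(CMSampleBufferRef buffer, NSError* error) {
        if (error || !buffer)
          return;
        // Extract or convert the JPEG bytes here; the patch reads the
        // CMBlockBuffer via ExtractBaseAddressAndLength(), see below.
      };
  [stillOutput captureStillImageAsynchronouslyFromConnection:connection
                                            completionHandler:handler];
}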
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"

#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#include <stddef.h>
#include <stdint.h>

(...skipping 77 matching lines...)
    UMA_HISTOGRAM_COUNTS("Media.VideoCapture.MacBook.NumberOfDevices",
                         number_of_devices + number_of_suspended_devices);
    if (number_of_devices + number_of_suspended_devices == 0) {
      UMA_HISTOGRAM_ENUMERATION(
          "Media.VideoCapture.MacBook.HardwareVersionWhenNoCamera",
          GetMacBookModel(model), MAX_MACBOOK_VERSION + 1);
    }
  }
}

-} // anonymous namespace
-
// This function translates Mac Core Video pixel formats to Chromium pixel
// formats.
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
  switch (code) {
    case kCVPixelFormatType_422YpCbCr8:
      return media::PIXEL_FORMAT_UYVY;
    case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs:
      return media::PIXEL_FORMAT_YUY2;
    case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML:
      return media::PIXEL_FORMAT_MJPEG;
    default:
      return media::PIXEL_FORMAT_UNKNOWN;
  }
}

+// Extracts |base_address| and |length| out of a SampleBuffer.
+void ExtractBaseAddressAndLength(
+    char** base_address,
+    size_t* length,
+    CoreMediaGlue::CMSampleBufferRef sample_buffer) {
+  CoreMediaGlue::CMBlockBufferRef block_buffer =
+      CoreMediaGlue::CMSampleBufferGetDataBuffer(sample_buffer);
+  DCHECK(block_buffer);
+
+  size_t length_at_offset;
+  const OSStatus status = CoreMediaGlue::CMBlockBufferGetDataPointer(
+      block_buffer, 0, &length_at_offset, length, base_address);
+  DCHECK_EQ(noErr, status);
+  // Expect the (M)JPEG data to be available as a contiguous reference, i.e.
+  // not covered by multiple memory blocks.
+  DCHECK_EQ(length_at_offset, *length);
+}
+
+} // anonymous namespace
+
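The helper above assumes the (M)JPEG payload sits in a single contiguous CMBlockBuffer and only DCHECKs that assumption. For reference, a minimal sketch of the copy-based alternative, using the raw Core Media calls rather than the CoreMediaGlue wrappers; this is not what the patch does, but it also handles a block buffer backed by several non-contiguous memory blocks.

#include <CoreMedia/CoreMedia.h>
#include <stdint.h>
#include <vector>

// Hedged sketch, raw Core Media: copy the sample's bytes into local storage,
// which works regardless of whether the block buffer is contiguous.
static std::vector<uint8_t> CopySampleBytes(CMSampleBufferRef sample_buffer) {
  CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(sample_buffer);
  if (!block_buffer)
    return {};
  std::vector<uint8_t> bytes(CMBlockBufferGetDataLength(block_buffer));
  if (CMBlockBufferCopyDataBytes(block_buffer, 0, bytes.size(),
                                 bytes.data()) != kCMBlockBufferNoErr) {
    bytes.clear();
  }
  return bytes;
}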
@implementation VideoCaptureDeviceAVFoundation

#pragma mark Class methods

+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // At this stage we already know that AVFoundation is supported and the whole
  // library is loaded and initialised, by the device monitoring.
  NSArray* devices = [AVCaptureDeviceGlue devices];
  int number_of_suspended_devices = 0;
  for (CrAVCaptureDevice* device in devices) {

(...skipping 80 matching lines...)

- (BOOL)setCaptureDevice:(NSString*)deviceId {
  DCHECK(captureSession_);
  DCHECK(main_thread_checker_.CalledOnValidThread());

  if (!deviceId) {
    // First stop the capture session, if it's running.
    [self stopCapture];
    // Now remove the input and output from the capture session.
    [captureSession_ removeOutput:captureVideoDataOutput_];
+    if (stillImageOutput_)
+      [captureSession_ removeOutput:stillImageOutput_];
    if (captureDeviceInput_) {
      [captureSession_ removeInput:captureDeviceInput_];
      // No need to release |captureDeviceInput_|, is owned by the session.
      captureDeviceInput_ = nil;
    }
    return YES;
  }

  // Look for input device with requested name.
  captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];

(...skipping 29 matching lines...)
    [self sendErrorString:[NSString stringWithUTF8String:
                                        "Could not create video data output."]];
    return NO;
  }
  [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true];
  [captureVideoDataOutput_
      setSampleBufferDelegate:self
                        queue:dispatch_get_global_queue(
                                  DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
  [captureSession_ addOutput:captureVideoDataOutput_];
+
+  // Create and plug the still image capture output. This should happen in
+  // advance of the actual picture to allow for the 3A to stabilize.
+  stillImageOutput_.reset(
+      [[AVFoundationGlue::AVCaptureStillImageOutputClass() alloc] init]);
+  if (stillImageOutput_ && [captureSession_ canAddOutput:stillImageOutput_])
+    [captureSession_ addOutput:stillImageOutput_];
+
  return YES;
}

- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  DCHECK(![captureSession_ isRunning] &&
         main_thread_checker_.CalledOnValidThread());

  frameWidth_ = width;

(...skipping 14 matching lines...)

    }

    // Compare according to Chromium preference.
    if (media::VideoCaptureFormat::ComparePixelFormatPreference(
            FourCCToChromiumPixelFormat(fourcc),
            FourCCToChromiumPixelFormat(best_fourcc))) {
      best_fourcc = fourcc;
    }
  }

+  if (best_fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
+    [captureSession_ removeOutput:stillImageOutput_];
+    stillImageOutput_.reset();
+  }
+
  // The capture output has to be configured, despite Mac documentation
  // detailing that setting the sessionPreset would be enough. The reason for
  // this mismatch is probably because most of the AVFoundation docs are written
  // for iOS and not for MacOsX. AVVideoScalingModeKey() refers to letterboxing
  // yes/no and preserve aspect ratio yes/no when scaling. Currently we set
  // cropping and preservation.
  NSDictionary* videoSettingsDictionary = @{
    (id) kCVPixelBufferWidthKey : @(width), (id)
    kCVPixelBufferHeightKey : @(height), (id)
    kCVPixelBufferPixelFormatTypeKey : @(best_fourcc),

(...skipping 44 matching lines...)
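The comment above explains why the video data output must be configured explicitly even though sessionPreset nominally suffices. The diff cuts the settings dictionary short; purely as an illustration of the stated intent (cropping plus aspect-ratio preservation), a complete dictionary could look like the hypothetical helper below. It uses the raw CoreVideo/AVFoundation constants instead of the AVFoundationGlue wrappers and does not reproduce the elided lines of the real code.

#import <AVFoundation/AVFoundation.h>

// Hypothetical, illustrative only; names and the exact key set are assumptions.
static void SketchConfigureVideoOutput(AVCaptureVideoDataOutput* output,
                                       int width,
                                       int height,
                                       FourCharCode best_fourcc) {
  NSDictionary* videoSettings = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : @(best_fourcc),
    AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill
  };
  [output setVideoSettings:videoSettings];
}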
  return YES;
}

- (void)stopCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if ([captureSession_ isRunning])
    [captureSession_ stopRunning]; // Synchronous.
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

+- (void)takePhoto {
+  DCHECK(main_thread_checker_.CalledOnValidThread());
+  DCHECK([captureSession_ isRunning]);
+  if (!stillImageOutput_)
+    return;
+
+  DCHECK_EQ(1u, [[stillImageOutput_ connections] count]);
+  CrAVCaptureConnection* const connection =
+      [[stillImageOutput_ connections] firstObject];
+  if (!connection) {
+    base::AutoLock lock(lock_);
+    frameReceiver_->OnPhotoError();
+    return;
+  }
+
+  const auto handler = ^(CoreMediaGlue::CMSampleBufferRef sampleBuffer,
+                         NSError* error) {
+    base::AutoLock lock(lock_);
+    if (!frameReceiver_)
+      return;
+    if (error != nil) {
+      frameReceiver_->OnPhotoError();
+      return;
+    }
+
+    // Recommended compressed pixel format is JPEG, we don't expect surprises.
+    // TODO(mcasas): Consider using [1] for merging EXIF output information:
+    // [1] +(NSData*)jpegStillImageNSDataRepresentation:jpegSampleBuffer;
+    DCHECK_EQ(
+        CoreMediaGlue::kCMVideoCodecType_JPEG,
+        CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
+            CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer)));
+
+    char* baseAddress = 0;
+    size_t length = 0;
+    ExtractBaseAddressAndLength(&baseAddress, &length, sampleBuffer);
+    frameReceiver_->OnPhotoTaken(reinterpret_cast<uint8_t*>(baseAddress),
+                                 length, "image/jpeg");
+  };
+
+  [stillImageOutput_ captureStillImageAsynchronouslyFromConnection:connection
+                                                  completionHandler:handler];
+}
+
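The TODO in the handler points at +[AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:]. A hedged sketch of that alternative, using the raw class rather than the Glue wrapper and not part of this patch: it would hand back a self-contained NSData with the EXIF attachments merged into the JPEG, instead of reading the block buffer directly.

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

// Hedged sketch of the TODO above, not what the patch does.
static NSData* SketchJpegDataFromStillSample(CMSampleBufferRef sampleBuffer) {
  return [AVCaptureStillImageOutput
      jpegStillImageNSDataRepresentation:sampleBuffer];
}
// The resulting [data bytes] / [data length] would then be forwarded to the
// frame receiver just as the block-buffer path in takePhoto does.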
#pragma mark Private methods

// |captureOutput| is called by the capture device to deliver a new frame.
// AVFoundation calls from a number of threads, depending on, at least, if
// Chrome is on foreground or background.
- (void)captureOutput:(CrAVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
           fromConnection:(CrAVCaptureConnection*)connection {
  const CoreMediaGlue::CMFormatDescriptionRef formatDescription =
      CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer);
  const FourCharCode fourcc =
      CoreMediaGlue::CMFormatDescriptionGetMediaSubType(formatDescription);
  const CoreMediaGlue::CMVideoDimensions dimensions =
      CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(formatDescription);
  const media::VideoCaptureFormat captureFormat(
      gfx::Size(dimensions.width, dimensions.height), frameRate_,
      FourCCToChromiumPixelFormat(fourcc));

  char* baseAddress = 0;
  size_t frameSize = 0;
  CVImageBufferRef videoFrame = nil;
  if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
-    // If MJPEG, use block buffer instead of pixel buffer.
-    CoreMediaGlue::CMBlockBufferRef blockBuffer =
-        CoreMediaGlue::CMSampleBufferGetDataBuffer(sampleBuffer);
-    if (blockBuffer) {
-      size_t lengthAtOffset;
-      CoreMediaGlue::CMBlockBufferGetDataPointer(
-          blockBuffer, 0, &lengthAtOffset, &frameSize, &baseAddress);
-      // Expect the MJPEG data to be available as a contiguous reference, i.e.
-      // not covered by multiple memory blocks.
-      CHECK_EQ(lengthAtOffset, frameSize);
-    }
+    ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer);
  } else {
    videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the frame and calculate frame size.
    if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) ==
        kCVReturnSuccess) {
      baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame));
      frameSize = CVPixelBufferGetHeight(videoFrame) *
                  CVPixelBufferGetBytesPerRow(videoFrame);
    } else {
      videoFrame = nil;

(...skipping 31 matching lines...)
}

- (void)sendErrorString:(NSString*)error {
  DLOG(ERROR) << [error UTF8String];
  base::AutoLock lock(lock_);
  if (frameReceiver_)
    frameReceiver_->ReceiveError(FROM_HERE, [error UTF8String]);
}

@end
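A note on the OnPhotoTaken() contract implied by takePhoto: the pointer handed to the receiver points into the CMSampleBuffer owned by the completion handler, so it is only valid for the duration of that call and the receiver must copy the JPEG bytes before returning. A hypothetical receiver sketch follows; the class name and signatures are assumptions, since the real frameReceiver_ interface is declared elsewhere and is not part of this diff.

#include <stddef.h>
#include <stdint.h>
#include <string>
#include <vector>

// Hypothetical, illustrative only: not the actual Chromium frame receiver.
class HypotheticalPhotoReceiver {
 public:
  // |data| is only valid for the duration of the call, so deep-copy it.
  void OnPhotoTaken(const uint8_t* data, size_t length, std::string mime_type) {
    last_photo_.assign(data, data + length);
    last_mime_type_ = std::move(mime_type);
  }
  void OnPhotoError() { last_photo_.clear(); }

 private:
  std::vector<uint8_t> last_photo_;
  std::string last_mime_type_;
};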