Chromium Code Reviews

Side by Side Diff: media/capture/video/mac/video_capture_device_avfoundation_mac.mm

Issue 2129733004: ImageCapture: Implement takePhoto() for Mac AVFoundation (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: General cleanup and added VideoCaptureDeviceTest::TakePhoto Created 4 years, 5 months ago
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"
6
7 #import <CoreMedia/CoreMedia.h>
8 #import <CoreVideo/CoreVideo.h>
9 #include <stddef.h>
10 #include <stdint.h>
(...skipping 77 matching lines...)
88 UMA_HISTOGRAM_COUNTS("Media.VideoCapture.MacBook.NumberOfDevices",
89 number_of_devices + number_of_suspended_devices);
90 if (number_of_devices + number_of_suspended_devices == 0) {
91 UMA_HISTOGRAM_ENUMERATION(
92 "Media.VideoCapture.MacBook.HardwareVersionWhenNoCamera",
93 GetMacBookModel(model), MAX_MACBOOK_VERSION + 1);
94 }
95 }
96 }
97
98 } // anonymous namespace
99
98 // This function translates Mac Core Video pixel formats to Chromium pixel
99 // formats.
100 media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
101 switch (code) {
102 case kCVPixelFormatType_422YpCbCr8:
103 return media::PIXEL_FORMAT_UYVY;
104 case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs:
105 return media::PIXEL_FORMAT_YUY2;
106 case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML:
107 return media::PIXEL_FORMAT_MJPEG;
108 default:
109 return media::PIXEL_FORMAT_UNKNOWN;
110 }
111 }
112
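A minimal usage sketch (not part of the change) showing how the mapping above is consumed; it mirrors the -captureOutput:didOutputSampleBuffer:fromConnection: handler further down and assumes |sample_buffer| is a valid CMSampleBufferRef delivered by AVFoundation:

  const CoreMediaGlue::CMFormatDescriptionRef format_description =
      CoreMediaGlue::CMSampleBufferGetFormatDescription(sample_buffer);
  const FourCharCode fourcc =
      CoreMediaGlue::CMFormatDescriptionGetMediaSubType(format_description);
  // PIXEL_FORMAT_UNKNOWN is returned for any FourCC not handled above.
  const media::VideoPixelFormat pixel_format = FourCCToChromiumPixelFormat(fourcc);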
113 // Extracts |baseAddress| and |length| out of a SampleBuffer.
114 void ExtractBaseAddressAndLength(
115 char** baseAddress,
Robert Sesek 2016/07/08 19:31:04 naming: use under_scores in non-ObjC methods
mcasas 2016/07/08 21:39:28 Oops, yes, done.
116 size_t* length,
117 CoreMediaGlue::CMSampleBufferRef sampleBuffer) {
118 CoreMediaGlue::CMBlockBufferRef blockBuffer =
119 CoreMediaGlue::CMSampleBufferGetDataBuffer(sampleBuffer);
120 DCHECK(blockBuffer);
121
122 size_t lengthAtOffset;
123 CoreMediaGlue::CMBlockBufferGetDataPointer(blockBuffer, 0, &lengthAtOffset,
124 length, baseAddress);
125 // Expect the (M)JPEG data to be available as a contiguous reference, i.e.
126 // not covered by multiple memory blocks.
127 DCHECK_EQ(lengthAtOffset, *length);
128 }
129
130 } // anonymous namespace
131
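Following the naming comment above (under_scores for parameters of non-Objective-C functions), the helper would presumably read as follows in a later patch set. This is only a sketch of the rename; the logic is copied unchanged from the lines above:

  void ExtractBaseAddressAndLength(char** base_address,
                                   size_t* length,
                                   CoreMediaGlue::CMSampleBufferRef sample_buffer) {
    CoreMediaGlue::CMBlockBufferRef block_buffer =
        CoreMediaGlue::CMSampleBufferGetDataBuffer(sample_buffer);
    DCHECK(block_buffer);

    size_t length_at_offset;
    CoreMediaGlue::CMBlockBufferGetDataPointer(block_buffer, 0, &length_at_offset,
                                               length, base_address);
    // Expect the (M)JPEG data to be available as one contiguous memory block.
    DCHECK_EQ(length_at_offset, *length);
  }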
132 @implementation VideoCaptureDeviceAVFoundation
133
134 #pragma mark Class methods
135
136 + (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
137 // At this stage we already know that AVFoundation is supported and the whole
138 // library is loaded and initialised, by the device monitoring.
139 NSArray* devices = [AVCaptureDeviceGlue devices];
140 int number_of_suspended_devices = 0;
141 for (CrAVCaptureDevice* device in devices) {
(...skipping 85 matching lines...)
227 if (!deviceId) {
228 // First stop the capture session, if it's running.
229 [self stopCapture];
230 // Now remove the input and output from the capture session.
231 [captureSession_ removeOutput:captureVideoDataOutput_];
232 if (captureDeviceInput_) {
233 [captureSession_ removeInput:captureDeviceInput_];
234 // No need to release |captureDeviceInput_|, is owned by the session.
235 captureDeviceInput_ = nil;
236 }
237 if (stillImageOutput_)
238 [captureSession_ removeOutput:stillImageOutput_];
239 return YES;
240 }
241
242 // Look for input device with requested name.
243 captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];
244 if (!captureDevice_) {
245 [self
246 sendErrorString:[NSString stringWithUTF8String:
247 "Could not open video capture device."]];
248 return NO;
(...skipping 24 matching lines...)
273 [self sendErrorString:[NSString stringWithUTF8String:
274 "Could not create video data output."]];
275 return NO;
276 }
277 [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true];
278 [captureVideoDataOutput_
279 setSampleBufferDelegate:self
280 queue:dispatch_get_global_queue(
281 DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
282 [captureSession_ addOutput:captureVideoDataOutput_];
283
284 // Create and plug the still image capture output. This should happen in
285 // advance of the actual picture to allow for the 3A to stabilize.
286 stillImageOutput_.reset(
287 [[AVFoundationGlue::AVCaptureStillImageOutputClass() alloc] init]);
288 [captureSession_ addOutput:stillImageOutput_];
289
290 return YES;
291 }
292
293 - (BOOL)setCaptureHeight:(int)height
294 width:(int)width
295 frameRate:(float)frameRate {
296 DCHECK(![captureSession_ isRunning] &&
297 main_thread_checker_.CalledOnValidThread());
298
299 frameWidth_ = width;
(...skipping 78 matching lines...)
378 return YES;
379 }
380
381 - (void)stopCapture {
382 DCHECK(main_thread_checker_.CalledOnValidThread());
383 if ([captureSession_ isRunning])
384 [captureSession_ stopRunning]; // Synchronous.
385 [[NSNotificationCenter defaultCenter] removeObserver:self];
386 }
387
388 - (void)takePhoto {
389 DCHECK(main_thread_checker_.CalledOnValidThread());
390 DCHECK([captureSession_ isRunning]);
391
392 DCHECK_EQ(1u, [[stillImageOutput_ connections] count]);
393 CrAVCaptureConnection* const connection =
394 [[stillImageOutput_ connections] firstObject];
395 if (!connection) {
396 base::AutoLock lock(lock_);
397 frameReceiver_->OnPhotoError();
398 return;
399 }
400
401 const auto handler = ^(CoreMediaGlue::CMSampleBufferRef sampleBuffer,
402 NSError* error) {
403 base::AutoLock lock(lock_);
404 if (!frameReceiver_)
405 return;
406 if (error != nil) {
407 frameReceiver_->OnPhotoError();
408 return;
409 }
410
411 // Recommended compressed pixel format is JPEG, we don't expect surprises.
412 // TODO(mcasas): Consider using [1] for merging EXIF output information:
413 // [1] +(NSData*)jpegStillImageNSDataRepresentation:jpegSampleBuffer;
414 DCHECK_EQ(
415 CoreMediaGlue::kCMVideoCodecType_JPEG,
416 CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
417 CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer)));
418
419 char* baseAddress = 0;
420 size_t length = 0;
421 ExtractBaseAddressAndLength(&baseAddress, &length, sampleBuffer);
422 frameReceiver_->OnPhotoTaken(reinterpret_cast<uint8_t*>(baseAddress),
423 length, "image/jpeg");
424 };
425
426 [stillImageOutput_ captureStillImageAsynchronouslyFromConnection:connection
427 completionHandler:handler];
428 }
429
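For context on the contract of the two callbacks used in the completion handler above, here is a stand-in receiver. The class name and exact signatures are assumptions for illustration (the real interface is declared in the header imported at the top of this file); the one property it demonstrates is that the JPEG bytes must be copied out, since the backing sample buffer is not guaranteed to outlive the handler.

  #include <cstddef>
  #include <cstdint>
  #include <string>
  #include <vector>

  // Hypothetical receiver, for illustration only.
  class FakePhotoReceiver {
   public:
    void OnPhotoTaken(const uint8_t* data, size_t length,
                      const std::string& mime_type) {
      // Copy the payload: |data| points into a CMSampleBuffer that is not
      // guaranteed to outlive the completion handler.
      photo_.assign(data, data + length);
      mime_type_ = mime_type;
    }
    void OnPhotoError() { photo_.clear(); }

   private:
    std::vector<uint8_t> photo_;
    std::string mime_type_;
  };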
430 #pragma mark Private methods
431
432 // |captureOutput| is called by the capture device to deliver a new frame.
433 // AVFoundation calls from a number of threads, depending on, at least, if
434 // Chrome is on foreground or background.
435 - (void)captureOutput:(CrAVCaptureOutput*)captureOutput
436 didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
437 fromConnection:(CrAVCaptureConnection*)connection {
438 const CoreMediaGlue::CMFormatDescriptionRef formatDescription =
439 CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer);
440 const FourCharCode fourcc =
441 CoreMediaGlue::CMFormatDescriptionGetMediaSubType(formatDescription);
442 const CoreMediaGlue::CMVideoDimensions dimensions =
443 CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(formatDescription);
444 const media::VideoCaptureFormat captureFormat(
445 gfx::Size(dimensions.width, dimensions.height), frameRate_,
446 FourCCToChromiumPixelFormat(fourcc));
447
448 char* baseAddress = 0;
449 size_t frameSize = 0;
450 CVImageBufferRef videoFrame = nil;
451 if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
384 // If MJPEG, use block buffer instead of pixel buffer.
385 CoreMediaGlue::CMBlockBufferRef blockBuffer =
386 CoreMediaGlue::CMSampleBufferGetDataBuffer(sampleBuffer);
387 if (blockBuffer) {
388 size_t lengthAtOffset;
389 CoreMediaGlue::CMBlockBufferGetDataPointer(
390 blockBuffer, 0, &lengthAtOffset, &frameSize, &baseAddress);
391 // Expect the MJPEG data to be available as a contiguous reference, i.e.
392 // not covered by multiple memory blocks.
393 CHECK_EQ(lengthAtOffset, frameSize);
394 }
452 ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer);
453 } else {
454 videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
455 // Lock the frame and calculate frame size.
456 if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) ==
457 kCVReturnSuccess) {
458 baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame));
459 frameSize = CVPixelBufferGetHeight(videoFrame) *
460 CVPixelBufferGetBytesPerRow(videoFrame);
461 } else {
462 videoFrame = nil;
(...skipping 31 matching lines...)
494 }
495
496 - (void)sendErrorString:(NSString*)error {
497 DLOG(ERROR) << [error UTF8String];
498 base::AutoLock lock(lock_);
499 if (frameReceiver_)
500 frameReceiver_->ReceiveError(FROM_HERE, [error UTF8String]);
501 }
502
503 @end