Chromium Code Reviews
Diff: media/capture/video/mac/video_capture_device_avfoundation_mac.mm

Issue 2146973002: RELAND: ImageCapture: Implement takePhoto() for Mac AVFoundation (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: add a [captureSession_ canAddOutput:stillImageOutput_] guard (created 4 years, 5 months ago)
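
The guard named in this patch set matters because -[AVCaptureSession addOutput:] can raise an exception when handed an output the session will not accept; asking canAddOutput: first avoids that. Below is a minimal sketch of the guarded pattern written against AVFoundation directly, purely for illustration; the diff itself goes through Chromium's AVFoundationGlue/CrAV* wrappers and uses the member names declared in video_capture_device_avfoundation_mac.h.

    // Illustrative sketch only (plain AVFoundation, not the patch itself):
    // attach a still image output to a capture session, but ask first
    // whether the session will accept it; -addOutput: throws otherwise.
    #import <AVFoundation/AVFoundation.h>

    static AVCaptureStillImageOutput* AddStillImageOutput(
        AVCaptureSession* session) {
      AVCaptureStillImageOutput* output =
          [[AVCaptureStillImageOutput alloc] init];
      if (![session canAddOutput:output])
        return nil;  // Rejected, e.g. a still image output is already attached.
      [session addOutput:output];
      return output;
    }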
 // Copyright 2013 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"

 #import <CoreMedia/CoreMedia.h>
 #import <CoreVideo/CoreVideo.h>
 #include <stddef.h>
 #include <stdint.h>
(...skipping 77 matching lines...)
     UMA_HISTOGRAM_COUNTS("Media.VideoCapture.MacBook.NumberOfDevices",
                          number_of_devices + number_of_suspended_devices);
     if (number_of_devices + number_of_suspended_devices == 0) {
       UMA_HISTOGRAM_ENUMERATION(
           "Media.VideoCapture.MacBook.HardwareVersionWhenNoCamera",
           GetMacBookModel(model), MAX_MACBOOK_VERSION + 1);
     }
   }
 }

-} // anonymous namespace
-
 // This function translates Mac Core Video pixel formats to Chromium pixel
 // formats.
 media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
   switch (code) {
     case kCVPixelFormatType_422YpCbCr8:
       return media::PIXEL_FORMAT_UYVY;
     case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs:
       return media::PIXEL_FORMAT_YUY2;
     case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML:
       return media::PIXEL_FORMAT_MJPEG;
     default:
       return media::PIXEL_FORMAT_UNKNOWN;
   }
 }

+// Extracts |base_address| and |length| out of a SampleBuffer.
+void ExtractBaseAddressAndLength(
+    char** base_address,
+    size_t* length,
+    CoreMediaGlue::CMSampleBufferRef sample_buffer) {
+  CoreMediaGlue::CMBlockBufferRef block_buffer =
+      CoreMediaGlue::CMSampleBufferGetDataBuffer(sample_buffer);
+  DCHECK(block_buffer);
+
+  size_t length_at_offset;
+  const OSStatus status = CoreMediaGlue::CMBlockBufferGetDataPointer(
+      block_buffer, 0, &length_at_offset, length, base_address);
+  DCHECK_EQ(noErr, status);
+  // Expect the (M)JPEG data to be available as a contiguous reference, i.e.
+  // not covered by multiple memory blocks.
+  DCHECK_EQ(length_at_offset, *length);
+}
+
+} // anonymous namespace
+
 @implementation VideoCaptureDeviceAVFoundation

 #pragma mark Class methods

 + (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
   // At this stage we already know that AVFoundation is supported and the whole
   // library is loaded and initialised, by the device monitoring.
   NSArray* devices = [AVCaptureDeviceGlue devices];
   int number_of_suspended_devices = 0;
   for (CrAVCaptureDevice* device in devices) {
(...skipping 85 matching lines...)
   if (!deviceId) {
     // First stop the capture session, if it's running.
     [self stopCapture];
     // Now remove the input and output from the capture session.
     [captureSession_ removeOutput:captureVideoDataOutput_];
     if (captureDeviceInput_) {
       [captureSession_ removeInput:captureDeviceInput_];
       // No need to release |captureDeviceInput_|, is owned by the session.
       captureDeviceInput_ = nil;
     }
+    if (stillImageOutput_)
+      [captureSession_ removeOutput:stillImageOutput_];
     return YES;
   }

   // Look for input device with requested name.
   captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];
   if (!captureDevice_) {
     [self
         sendErrorString:[NSString stringWithUTF8String:
                              "Could not open video capture device."]];
     return NO;
(...skipping 24 matching lines...)
     [self sendErrorString:[NSString stringWithUTF8String:
                                "Could not create video data output."]];
     return NO;
   }
   [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true];
   [captureVideoDataOutput_
       setSampleBufferDelegate:self
                         queue:dispatch_get_global_queue(
                                   DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
   [captureSession_ addOutput:captureVideoDataOutput_];
+
+  // Create and plug the still image capture output. This should happen in
+  // advance of the actual picture to allow for the 3A to stabilize.
+  stillImageOutput_.reset(
+      [[AVFoundationGlue::AVCaptureStillImageOutputClass() alloc] init]);
+  if ([captureSession_ canAddOutput:stillImageOutput_])
+    [captureSession_ addOutput:stillImageOutput_];
+
   return YES;
 }

 - (BOOL)setCaptureHeight:(int)height
                    width:(int)width
                frameRate:(float)frameRate {
   DCHECK(![captureSession_ isRunning] &&
          main_thread_checker_.CalledOnValidThread());

   frameWidth_ = width;
(...skipping 78 matching lines...)
   return YES;
 }

 - (void)stopCapture {
   DCHECK(main_thread_checker_.CalledOnValidThread());
   if ([captureSession_ isRunning])
     [captureSession_ stopRunning]; // Synchronous.
   [[NSNotificationCenter defaultCenter] removeObserver:self];
 }

+- (void)takePhoto {
+  DCHECK(main_thread_checker_.CalledOnValidThread());
+  DCHECK([captureSession_ isRunning]);
+
+  DCHECK_EQ(1u, [[stillImageOutput_ connections] count]);
+  CrAVCaptureConnection* const connection =
+      [[stillImageOutput_ connections] firstObject];
+  if (!connection) {
+    base::AutoLock lock(lock_);
+    frameReceiver_->OnPhotoError();
+    return;
+  }
+
+  const auto handler = ^(CoreMediaGlue::CMSampleBufferRef sampleBuffer,
+                         NSError* error) {
+    base::AutoLock lock(lock_);
+    if (!frameReceiver_)
+      return;
+    if (error != nil) {
+      frameReceiver_->OnPhotoError();
+      return;
+    }
+
+    // Recommended compressed pixel format is JPEG, we don't expect surprises.
+    // TODO(mcasas): Consider using [1] for merging EXIF output information:
+    // [1] +(NSData*)jpegStillImageNSDataRepresentation:jpegSampleBuffer;
+    DCHECK_EQ(
+        CoreMediaGlue::kCMVideoCodecType_JPEG,
+        CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
+            CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer)));
+
+    char* baseAddress = 0;
+    size_t length = 0;
+    ExtractBaseAddressAndLength(&baseAddress, &length, sampleBuffer);
+    frameReceiver_->OnPhotoTaken(reinterpret_cast<uint8_t*>(baseAddress),
+                                 length, "image/jpeg");
+  };
+
+  [stillImageOutput_ captureStillImageAsynchronouslyFromConnection:connection
+                                                  completionHandler:handler];
+}
+
 #pragma mark Private methods

 // |captureOutput| is called by the capture device to deliver a new frame.
 // AVFoundation calls from a number of threads, depending on, at least, if
 // Chrome is on foreground or background.
 - (void)captureOutput:(CrAVCaptureOutput*)captureOutput
     didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
            fromConnection:(CrAVCaptureConnection*)connection {
   const CoreMediaGlue::CMFormatDescriptionRef formatDescription =
       CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer);
   const FourCharCode fourcc =
       CoreMediaGlue::CMFormatDescriptionGetMediaSubType(formatDescription);
   const CoreMediaGlue::CMVideoDimensions dimensions =
       CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(formatDescription);
   const media::VideoCaptureFormat captureFormat(
       gfx::Size(dimensions.width, dimensions.height), frameRate_,
       FourCCToChromiumPixelFormat(fourcc));

   char* baseAddress = 0;
   size_t frameSize = 0;
   CVImageBufferRef videoFrame = nil;
   if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
-    // If MJPEG, use block buffer instead of pixel buffer.
-    CoreMediaGlue::CMBlockBufferRef blockBuffer =
-        CoreMediaGlue::CMSampleBufferGetDataBuffer(sampleBuffer);
-    if (blockBuffer) {
-      size_t lengthAtOffset;
-      CoreMediaGlue::CMBlockBufferGetDataPointer(
-          blockBuffer, 0, &lengthAtOffset, &frameSize, &baseAddress);
-      // Expect the MJPEG data to be available as a contiguous reference, i.e.
-      // not covered by multiple memory blocks.
-      CHECK_EQ(lengthAtOffset, frameSize);
-    }
+    ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer);
   } else {
     videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
     // Lock the frame and calculate frame size.
     if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) ==
         kCVReturnSuccess) {
       baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame));
       frameSize = CVPixelBufferGetHeight(videoFrame) *
                   CVPixelBufferGetBytesPerRow(videoFrame);
     } else {
       videoFrame = nil;
(...skipping 31 matching lines...)
 }

 - (void)sendErrorString:(NSString*)error {
   DLOG(ERROR) << [error UTF8String];
   base::AutoLock lock(lock_);
   if (frameReceiver_)
     frameReceiver_->ReceiveError(FROM_HERE, [error UTF8String]);
 }

 @end
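
For orientation, here is a hedged sketch of the same still-capture flow written against AVFoundation directly, without the CoreMediaGlue/AVFoundationGlue indirection used in the file above. It mirrors -takePhoto: take the output's single connection, request a frame asynchronously, and report either the JPEG bytes or an error. The jpegStillImageNSDataRepresentation: helper is the one the TODO inside the completion handler refers to; the function and callback names below are illustrative, not part of the patch.

    // Illustrative sketch only: request a still image from a running
    // session's still image output and hand the JPEG bytes to a callback.
    #import <AVFoundation/AVFoundation.h>

    static void TakePhotoSketch(AVCaptureStillImageOutput* stillImageOutput,
                                void (^onJpeg)(NSData* jpegData),
                                void (^onError)(NSError* error)) {
      // Like -takePhoto above, expect a single connection on the output.
      AVCaptureConnection* connection =
          [[stillImageOutput connections] firstObject];
      if (!connection) {
        onError(nil);  // No connection; mirrors the OnPhotoError() path.
        return;
      }
      [stillImageOutput
          captureStillImageAsynchronouslyFromConnection:connection
              completionHandler:^(CMSampleBufferRef sampleBuffer, NSError* error) {
                if (error || !sampleBuffer) {
                  onError(error);
                  return;
                }
                // Class helper mentioned in the TODO: wraps the JPEG sample
                // buffer as NSData and merges EXIF attachments into it.
                onJpeg([AVCaptureStillImageOutput
                    jpegStillImageNSDataRepresentation:sampleBuffer]);
              }];
    }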