Chromium Code Reviews

Side by Side Diff: media/capture/video/mac/video_capture_device_avfoundation_mac.mm

Issue 2151443003: Revert of RELAND: ImageCapture: Implement takePhoto() for Mac AVFoundation (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 5 months ago
1 // Copyright 2013 The Chromium Authors. All rights reserved. 1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #import "media/capture/video/mac/video_capture_device_avfoundation_mac.h" 5 #import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"
6 6
7 #import <CoreMedia/CoreMedia.h> 7 #import <CoreMedia/CoreMedia.h>
8 #import <CoreVideo/CoreVideo.h> 8 #import <CoreVideo/CoreVideo.h>
9 #include <stddef.h> 9 #include <stddef.h>
10 #include <stdint.h> 10 #include <stdint.h>
(...skipping 77 matching lines...)
88 UMA_HISTOGRAM_COUNTS("Media.VideoCapture.MacBook.NumberOfDevices", 88 UMA_HISTOGRAM_COUNTS("Media.VideoCapture.MacBook.NumberOfDevices",
89 number_of_devices + number_of_suspended_devices); 89 number_of_devices + number_of_suspended_devices);
90 if (number_of_devices + number_of_suspended_devices == 0) { 90 if (number_of_devices + number_of_suspended_devices == 0) {
91 UMA_HISTOGRAM_ENUMERATION( 91 UMA_HISTOGRAM_ENUMERATION(
92 "Media.VideoCapture.MacBook.HardwareVersionWhenNoCamera", 92 "Media.VideoCapture.MacBook.HardwareVersionWhenNoCamera",
93 GetMacBookModel(model), MAX_MACBOOK_VERSION + 1); 93 GetMacBookModel(model), MAX_MACBOOK_VERSION + 1);
94 } 94 }
95 } 95 }
96 } 96 }
97 97
98 } // anonymous namespace
99
98 // This function translates Mac Core Video pixel formats to Chromium pixel 100 // This function translates Mac Core Video pixel formats to Chromium pixel
99 // formats. 101 // formats.
100 media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) { 102 media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
101 switch (code) { 103 switch (code) {
102 case kCVPixelFormatType_422YpCbCr8: 104 case kCVPixelFormatType_422YpCbCr8:
103 return media::PIXEL_FORMAT_UYVY; 105 return media::PIXEL_FORMAT_UYVY;
104 case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs: 106 case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs:
105 return media::PIXEL_FORMAT_YUY2; 107 return media::PIXEL_FORMAT_YUY2;
106 case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML: 108 case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML:
107 return media::PIXEL_FORMAT_MJPEG; 109 return media::PIXEL_FORMAT_MJPEG;
108 default: 110 default:
109 return media::PIXEL_FORMAT_UNKNOWN; 111 return media::PIXEL_FORMAT_UNKNOWN;
110 } 112 }
111 } 113 }
112 114
113 // Extracts |base_address| and |length| out of a SampleBuffer.
114 void ExtractBaseAddressAndLength(
115 char** base_address,
116 size_t* length,
117 CoreMediaGlue::CMSampleBufferRef sample_buffer) {
118 CoreMediaGlue::CMBlockBufferRef block_buffer =
119 CoreMediaGlue::CMSampleBufferGetDataBuffer(sample_buffer);
120 DCHECK(block_buffer);
121
122 size_t length_at_offset;
123 const OSStatus status = CoreMediaGlue::CMBlockBufferGetDataPointer(
124 block_buffer, 0, &length_at_offset, length, base_address);
125 DCHECK_EQ(noErr, status);
126 // Expect the (M)JPEG data to be available as a contiguous reference, i.e.
127 // not covered by multiple memory blocks.
128 DCHECK_EQ(length_at_offset, *length);
129 }
130
131 } // anonymous namespace
132
133 @implementation VideoCaptureDeviceAVFoundation 115 @implementation VideoCaptureDeviceAVFoundation
134 116
135 #pragma mark Class methods 117 #pragma mark Class methods
136 118
137 + (void)getDeviceNames:(NSMutableDictionary*)deviceNames { 119 + (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
138 // At this stage we already know that AVFoundation is supported and the whole 120 // At this stage we already know that AVFoundation is supported and the whole
139 // library is loaded and initialised by the device monitoring. 121 // library is loaded and initialised by the device monitoring.
140 NSArray* devices = [AVCaptureDeviceGlue devices]; 122 NSArray* devices = [AVCaptureDeviceGlue devices];
141 int number_of_suspended_devices = 0; 123 int number_of_suspended_devices = 0;
142 for (CrAVCaptureDevice* device in devices) { 124 for (CrAVCaptureDevice* device in devices) {
(...skipping 85 matching lines...)
228 if (!deviceId) { 210 if (!deviceId) {
229 // First stop the capture session, if it's running. 211 // First stop the capture session, if it's running.
230 [self stopCapture]; 212 [self stopCapture];
231 // Now remove the input and output from the capture session. 213 // Now remove the input and output from the capture session.
232 [captureSession_ removeOutput:captureVideoDataOutput_]; 214 [captureSession_ removeOutput:captureVideoDataOutput_];
233 if (captureDeviceInput_) { 215 if (captureDeviceInput_) {
234 [captureSession_ removeInput:captureDeviceInput_]; 216 [captureSession_ removeInput:captureDeviceInput_];
235 // No need to release |captureDeviceInput_|; it is owned by the session. 217 // No need to release |captureDeviceInput_|; it is owned by the session.
236 captureDeviceInput_ = nil; 218 captureDeviceInput_ = nil;
237 } 219 }
238 if (stillImageOutput_)
239 [captureSession_ removeOutput:stillImageOutput_];
240 return YES; 220 return YES;
241 } 221 }
242 222
243 // Look for input device with requested name. 223 // Look for input device with requested name.
244 captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId]; 224 captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];
245 if (!captureDevice_) { 225 if (!captureDevice_) {
246 [self 226 [self
247 sendErrorString:[NSString stringWithUTF8String: 227 sendErrorString:[NSString stringWithUTF8String:
248 "Could not open video capture device."]]; 228 "Could not open video capture device."]];
249 return NO; 229 return NO;
(...skipping 24 matching lines...)
274 [self sendErrorString:[NSString stringWithUTF8String: 254 [self sendErrorString:[NSString stringWithUTF8String:
275 "Could not create video data output."]]; 255 "Could not create video data output."]];
276 return NO; 256 return NO;
277 } 257 }
278 [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true]; 258 [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true];
279 [captureVideoDataOutput_ 259 [captureVideoDataOutput_
280 setSampleBufferDelegate:self 260 setSampleBufferDelegate:self
281 queue:dispatch_get_global_queue( 261 queue:dispatch_get_global_queue(
282 DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)]; 262 DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
283 [captureSession_ addOutput:captureVideoDataOutput_]; 263 [captureSession_ addOutput:captureVideoDataOutput_];
284
285 // Create and plug the still image capture output. This should happen in
286 // advance of the actual picture to allow for the 3A to stabilize.
287 stillImageOutput_.reset(
288 [[AVFoundationGlue::AVCaptureStillImageOutputClass() alloc] init]);
289 if ([captureSession_ canAddOutput:stillImageOutput_])
290 [captureSession_ addOutput:stillImageOutput_];
291
292 return YES; 264 return YES;
293 } 265 }
294 266
295 - (BOOL)setCaptureHeight:(int)height 267 - (BOOL)setCaptureHeight:(int)height
296 width:(int)width 268 width:(int)width
297 frameRate:(float)frameRate { 269 frameRate:(float)frameRate {
298 DCHECK(![captureSession_ isRunning] && 270 DCHECK(![captureSession_ isRunning] &&
299 main_thread_checker_.CalledOnValidThread()); 271 main_thread_checker_.CalledOnValidThread());
300 272
301 frameWidth_ = width; 273 frameWidth_ = width;
(...skipping 78 matching lines...)
380 return YES; 352 return YES;
381 } 353 }
382 354
383 - (void)stopCapture { 355 - (void)stopCapture {
384 DCHECK(main_thread_checker_.CalledOnValidThread()); 356 DCHECK(main_thread_checker_.CalledOnValidThread());
385 if ([captureSession_ isRunning]) 357 if ([captureSession_ isRunning])
386 [captureSession_ stopRunning]; // Synchronous. 358 [captureSession_ stopRunning]; // Synchronous.
387 [[NSNotificationCenter defaultCenter] removeObserver:self]; 359 [[NSNotificationCenter defaultCenter] removeObserver:self];
388 } 360 }
389 361
390 - (void)takePhoto {
391 DCHECK(main_thread_checker_.CalledOnValidThread());
392 DCHECK([captureSession_ isRunning]);
393
394 DCHECK_EQ(1u, [[stillImageOutput_ connections] count]);
395 CrAVCaptureConnection* const connection =
396 [[stillImageOutput_ connections] firstObject];
397 if (!connection) {
398 base::AutoLock lock(lock_);
399 frameReceiver_->OnPhotoError();
400 return;
401 }
402
403 const auto handler = ^(CoreMediaGlue::CMSampleBufferRef sampleBuffer,
404 NSError* error) {
405 base::AutoLock lock(lock_);
406 if (!frameReceiver_)
407 return;
408 if (error != nil) {
409 frameReceiver_->OnPhotoError();
410 return;
411 }
412
413 // The recommended compressed pixel format is JPEG; we don't expect surprises.
414 // TODO(mcasas): Consider using [1] for merging EXIF output information:
415 // [1] +(NSData*)jpegStillImageNSDataRepresentation:jpegSampleBuffer;
416 DCHECK_EQ(
417 CoreMediaGlue::kCMVideoCodecType_JPEG,
418 CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
419 CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer)));
420
421 char* baseAddress = 0;
422 size_t length = 0;
423 ExtractBaseAddressAndLength(&baseAddress, &length, sampleBuffer);
424 frameReceiver_->OnPhotoTaken(reinterpret_cast<uint8_t*>(baseAddress),
425 length, "image/jpeg");
426 };
427
428 [stillImageOutput_ captureStillImageAsynchronouslyFromConnection:connection
429 completionHandler:handler];
430 }
431
432 #pragma mark Private methods 362 #pragma mark Private methods
433 363
434 // |captureOutput| is called by the capture device to deliver a new frame. 364 // |captureOutput| is called by the capture device to deliver a new frame.
435 // AVFoundation calls it from a number of threads, depending on, at least, 365 // AVFoundation calls it from a number of threads, depending on, at least,
436 // whether Chrome is in the foreground or background. 366 // whether Chrome is in the foreground or background.
437 - (void)captureOutput:(CrAVCaptureOutput*)captureOutput 367 - (void)captureOutput:(CrAVCaptureOutput*)captureOutput
438 didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer 368 didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
439 fromConnection:(CrAVCaptureConnection*)connection { 369 fromConnection:(CrAVCaptureConnection*)connection {
440 const CoreMediaGlue::CMFormatDescriptionRef formatDescription = 370 const CoreMediaGlue::CMFormatDescriptionRef formatDescription =
441 CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer); 371 CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer);
442 const FourCharCode fourcc = 372 const FourCharCode fourcc =
443 CoreMediaGlue::CMFormatDescriptionGetMediaSubType(formatDescription); 373 CoreMediaGlue::CMFormatDescriptionGetMediaSubType(formatDescription);
444 const CoreMediaGlue::CMVideoDimensions dimensions = 374 const CoreMediaGlue::CMVideoDimensions dimensions =
445 CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(formatDescription); 375 CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(formatDescription);
446 const media::VideoCaptureFormat captureFormat( 376 const media::VideoCaptureFormat captureFormat(
447 gfx::Size(dimensions.width, dimensions.height), frameRate_, 377 gfx::Size(dimensions.width, dimensions.height), frameRate_,
448 FourCCToChromiumPixelFormat(fourcc)); 378 FourCCToChromiumPixelFormat(fourcc));
449 379
450 char* baseAddress = 0; 380 char* baseAddress = 0;
451 size_t frameSize = 0; 381 size_t frameSize = 0;
452 CVImageBufferRef videoFrame = nil; 382 CVImageBufferRef videoFrame = nil;
453 if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) { 383 if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
454 ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer); 384 // If MJPEG, use block buffer instead of pixel buffer.
385 CoreMediaGlue::CMBlockBufferRef blockBuffer =
386 CoreMediaGlue::CMSampleBufferGetDataBuffer(sampleBuffer);
387 if (blockBuffer) {
388 size_t lengthAtOffset;
389 CoreMediaGlue::CMBlockBufferGetDataPointer(
390 blockBuffer, 0, &lengthAtOffset, &frameSize, &baseAddress);
391 // Expect the MJPEG data to be available as a contiguous reference, i.e.
392 // not covered by multiple memory blocks.
393 CHECK_EQ(lengthAtOffset, frameSize);
394 }
455 } else { 395 } else {
456 videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer); 396 videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
457 // Lock the frame and calculate frame size. 397 // Lock the frame and calculate frame size.
458 if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) == 398 if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) ==
459 kCVReturnSuccess) { 399 kCVReturnSuccess) {
460 baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame)); 400 baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame));
461 frameSize = CVPixelBufferGetHeight(videoFrame) * 401 frameSize = CVPixelBufferGetHeight(videoFrame) *
462 CVPixelBufferGetBytesPerRow(videoFrame); 402 CVPixelBufferGetBytesPerRow(videoFrame);
463 } else { 403 } else {
464 videoFrame = nil; 404 videoFrame = nil;
(...skipping 31 matching lines...)
496 } 436 }
497 437
498 - (void)sendErrorString:(NSString*)error { 438 - (void)sendErrorString:(NSString*)error {
499 DLOG(ERROR) << [error UTF8String]; 439 DLOG(ERROR) << [error UTF8String];
500 base::AutoLock lock(lock_); 440 base::AutoLock lock(lock_);
501 if (frameReceiver_) 441 if (frameReceiver_)
502 frameReceiver_->ReceiveError(FROM_HERE, [error UTF8String]); 442 frameReceiver_->ReceiveError(FROM_HERE, [error UTF8String]);
503 } 443 }
504 444
505 @end 445 @end
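
For reference, below is a minimal, self-contained sketch of the still-image path that this CL reverts, written against the plain AVFoundation/CoreMedia APIs instead of the AVFoundationGlue/CoreMediaGlue indirection used in the patch. The TakeJpegPhoto name and the std::function callback are illustrative assumptions, not Chromium API; the sketch assumes the AVCaptureStillImageOutput was already added to a running session and left at its default (JPEG) output settings.

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

#include <cstddef>
#include <cstdint>
#include <functional>
#include <vector>

// Captures one still frame from |stillImageOutput| and hands the encoded
// JPEG bytes to |done|; passes an empty vector on any failure.
static void TakeJpegPhoto(AVCaptureStillImageOutput* stillImageOutput,
                          std::function<void(std::vector<uint8_t>)> done) {
  AVCaptureConnection* connection =
      [[stillImageOutput connections] firstObject];
  if (!connection) {
    done({});
    return;
  }

  const auto handler = ^(CMSampleBufferRef sampleBuffer, NSError* error) {
    if (error || !sampleBuffer) {
      done({});
      return;
    }
    // AVCaptureStillImageOutput encodes to JPEG by default; the payload is
    // expected to arrive as one contiguous CMBlockBuffer.
    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t lengthAtOffset = 0;
    size_t length = 0;
    char* baseAddress = nullptr;
    if (!blockBuffer ||
        CMBlockBufferGetDataPointer(blockBuffer, 0, &lengthAtOffset, &length,
                                    &baseAddress) != noErr ||
        lengthAtOffset != length) {
      done({});
      return;
    }
    done(std::vector<uint8_t>(baseAddress, baseAddress + length));
  };

  [stillImageOutput captureStillImageAsynchronouslyFromConnection:connection
                                                 completionHandler:handler];
}

As in the reverted change, a caller would create the output and plug it into the AVCaptureSession well ahead of the shot, so the auto-exposure/auto-focus/auto-white-balance ("3A") has time to stabilize before the capture.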
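
Similarly, the non-MJPEG branch of -captureOutput:didOutputSampleBuffer:fromConnection: above treats a packed pixel buffer as one contiguous span of bytesPerRow x height bytes. A small sketch of that mapping follows; the CopyPackedFrame helper is an illustrative name, not part of Chromium.

#import <CoreVideo/CoreVideo.h>

#include <cstddef>
#include <cstdint>
#include <vector>

// Copies a packed, single-plane pixel buffer (e.g. 2vuy/yuvs) into a byte
// vector; returns an empty vector if the buffer cannot be locked.
static std::vector<uint8_t> CopyPackedFrame(CVImageBufferRef videoFrame) {
  std::vector<uint8_t> bytes;
  if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) !=
      kCVReturnSuccess) {
    return bytes;
  }
  // Note that bytes-per-row may include row padding, so this size can be
  // slightly larger than width * height * bytes-per-pixel.
  const size_t size = CVPixelBufferGetHeight(videoFrame) *
                      CVPixelBufferGetBytesPerRow(videoFrame);
  const uint8_t* base =
      static_cast<const uint8_t*>(CVPixelBufferGetBaseAddress(videoFrame));
  bytes.assign(base, base + size);
  CVPixelBufferUnlockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly);
  return bytes;
}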