OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #import "media/capture/video/mac/video_capture_device_avfoundation_mac.h" | 5 #import "media/capture/video/mac/video_capture_device_avfoundation_mac.h" |
6 | 6 |
| 7 #import <AVFoundation/AVFoundation.h> |
7 #import <CoreMedia/CoreMedia.h> | 8 #import <CoreMedia/CoreMedia.h> |
8 #import <CoreVideo/CoreVideo.h> | 9 #import <CoreVideo/CoreVideo.h> |
9 #include <stddef.h> | 10 #include <stddef.h> |
10 #include <stdint.h> | 11 #include <stdint.h> |
11 | 12 |
12 #include "base/location.h" | 13 #include "base/location.h" |
13 #include "base/logging.h" | 14 #include "base/logging.h" |
14 #include "base/mac/foundation_util.h" | 15 #include "base/mac/foundation_util.h" |
15 #include "base/mac/mac_util.h" | 16 #include "base/mac/mac_util.h" |
16 #include "base/metrics/histogram_macros.h" | 17 #include "base/metrics/histogram_macros.h" |
(...skipping 78 matching lines...)
95 } | 96 } |
96 } | 97 } |
97 } | 98 } |
98 | 99 |
99 // This function translates Mac Core Video pixel formats to Chromium pixel | 100 // This function translates Mac Core Video pixel formats to Chromium pixel |
100 // formats. | 101 // formats. |
101 media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) { | 102 media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) { |
102 switch (code) { | 103 switch (code) { |
103 case kCVPixelFormatType_422YpCbCr8: | 104 case kCVPixelFormatType_422YpCbCr8: |
104 return media::PIXEL_FORMAT_UYVY; | 105 return media::PIXEL_FORMAT_UYVY; |
105 case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs: | 106 case kCMPixelFormat_422YpCbCr8_yuvs: |
106 return media::PIXEL_FORMAT_YUY2; | 107 return media::PIXEL_FORMAT_YUY2; |
107 case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML: | 108 case kCMVideoCodecType_JPEG_OpenDML: |
108 return media::PIXEL_FORMAT_MJPEG; | 109 return media::PIXEL_FORMAT_MJPEG; |
109 default: | 110 default: |
110 return media::PIXEL_FORMAT_UNKNOWN; | 111 return media::PIXEL_FORMAT_UNKNOWN; |
111 } | 112 } |
112 } | 113 } |
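For reference, the FourCC spellings behind the constants handled above, per the CoreVideo/CoreMedia SDK headers (listed here for readability, not part of the CL):

  // kCVPixelFormatType_422YpCbCr8   == '2vuy'  ->  PIXEL_FORMAT_UYVY
  // kCMPixelFormat_422YpCbCr8_yuvs  == 'yuvs'  ->  PIXEL_FORMAT_YUY2
  // kCMVideoCodecType_JPEG_OpenDML  == 'dmb1'  ->  PIXEL_FORMAT_MJPEG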
113 | 114 |
114 // Extracts |base_address| and |length| out of a SampleBuffer. | 115 // Extracts |base_address| and |length| out of a SampleBuffer. |
115 void ExtractBaseAddressAndLength( | 116 void ExtractBaseAddressAndLength(char** base_address, |
116 char** base_address, | 117 size_t* length, |
117 size_t* length, | 118 CMSampleBufferRef sample_buffer) { |
118 CoreMediaGlue::CMSampleBufferRef sample_buffer) { | 119 CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(sample_buffer); |
119 CoreMediaGlue::CMBlockBufferRef block_buffer = | |
120 CoreMediaGlue::CMSampleBufferGetDataBuffer(sample_buffer); | |
121 DCHECK(block_buffer); | 120 DCHECK(block_buffer); |
122 | 121 |
123 size_t length_at_offset; | 122 size_t length_at_offset; |
124 const OSStatus status = CoreMediaGlue::CMBlockBufferGetDataPointer( | 123 const OSStatus status = CMBlockBufferGetDataPointer( |
125 block_buffer, 0, &length_at_offset, length, base_address); | 124 block_buffer, 0, &length_at_offset, length, base_address); |
126 DCHECK_EQ(noErr, status); | 125 DCHECK_EQ(noErr, status); |
127 // Expect the (M)JPEG data to be available as a contiguous reference, i.e. | 126 // Expect the (M)JPEG data to be available as a contiguous reference, i.e. |
128 // not covered by multiple memory blocks. | 127 // not covered by multiple memory blocks. |
129 DCHECK_EQ(length_at_offset, *length); | 128 DCHECK_EQ(length_at_offset, *length); |
130 } | 129 } |
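The DCHECKs above encode the assumption that the (M)JPEG payload lives in a single contiguous memory block. A minimal sketch of how a caller could verify that assumption explicitly (hypothetical helper, not part of this CL; CMBlockBufferIsRangeContiguous and CMBlockBufferGetDataLength are the standard CoreMedia calls):

  // Returns true if every byte of |sample_buffer|'s data is backed by one
  // contiguous memory block, i.e. a single GetDataPointer call covers it all.
  static bool IsSampleBufferContiguous(CMSampleBufferRef sample_buffer) {
    CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(sample_buffer);
    if (!block_buffer)
      return false;
    return CMBlockBufferIsRangeContiguous(
        block_buffer, 0, CMBlockBufferGetDataLength(block_buffer));
  }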
131 | 130 |
132 } // anonymous namespace | 131 } // anonymous namespace |
133 | 132 |
134 @implementation VideoCaptureDeviceAVFoundation | 133 @implementation VideoCaptureDeviceAVFoundation |
135 | 134 |
136 #pragma mark Class methods | 135 #pragma mark Class methods |
137 | 136 |
138 + (void)getDeviceNames:(NSMutableDictionary*)deviceNames { | 137 + (void)getDeviceNames:(NSMutableDictionary*)deviceNames { |
139 // At this stage we already know that AVFoundation is supported and the whole | 138 // At this stage we already know that AVFoundation is supported and the whole |
140 // library is loaded and initialised by the device monitoring. | 139 // library is loaded and initialised by the device monitoring. |
141 NSArray* devices = [AVCaptureDeviceGlue devices]; | 140 NSArray* devices = [AVCaptureDevice devices]; |
142 int number_of_suspended_devices = 0; | 141 int number_of_suspended_devices = 0; |
143 for (CrAVCaptureDevice* device in devices) { | 142 for (AVCaptureDevice* device in devices) { |
144 if ([device hasMediaType:AVFoundationGlue::AVMediaTypeVideo()] || | 143 if ([device hasMediaType:AVMediaTypeVideo] || |
145 [device hasMediaType:AVFoundationGlue::AVMediaTypeMuxed()]) { | 144 [device hasMediaType:AVMediaTypeMuxed]) { |
146 if ([device isSuspended]) { | 145 if ([device isSuspended]) { |
147 ++number_of_suspended_devices; | 146 ++number_of_suspended_devices; |
148 continue; | 147 continue; |
149 } | 148 } |
150 DeviceNameAndTransportType* nameAndTransportType = | 149 DeviceNameAndTransportType* nameAndTransportType = |
151 [[[DeviceNameAndTransportType alloc] | 150 [[[DeviceNameAndTransportType alloc] |
152 initWithName:[device localizedName] | 151 initWithName:[device localizedName] |
153 transportType:[device transportType]] autorelease]; | 152 transportType:[device transportType]] autorelease]; |
154 [deviceNames setObject:nameAndTransportType forKey:[device uniqueID]]; | 153 [deviceNames setObject:nameAndTransportType forKey:[device uniqueID]]; |
155 } | 154 } |
156 } | 155 } |
157 MaybeWriteUma([deviceNames count], number_of_suspended_devices); | 156 MaybeWriteUma([deviceNames count], number_of_suspended_devices); |
158 } | 157 } |
159 | 158 |
160 + (NSDictionary*)deviceNames { | 159 + (NSDictionary*)deviceNames { |
161 NSMutableDictionary* deviceNames = | 160 NSMutableDictionary* deviceNames = |
162 [[[NSMutableDictionary alloc] init] autorelease]; | 161 [[[NSMutableDictionary alloc] init] autorelease]; |
163 // The device name retrieval is not going to happen in the main thread, and | 162 // The device name retrieval is not going to happen in the main thread, and |
164 // this might cause instabilities (it did in QTKit), so keep an eye here. | 163 // this might cause instabilities (it did in QTKit), so keep an eye here. |
165 [self getDeviceNames:deviceNames]; | 164 [self getDeviceNames:deviceNames]; |
166 return deviceNames; | 165 return deviceNames; |
167 } | 166 } |
168 | 167 |
169 + (void)getDevice:(const media::VideoCaptureDeviceDescriptor&)descriptor | 168 + (void)getDevice:(const media::VideoCaptureDeviceDescriptor&)descriptor |
170 supportedFormats:(media::VideoCaptureFormats*)formats { | 169 supportedFormats:(media::VideoCaptureFormats*)formats { |
171 NSArray* devices = [AVCaptureDeviceGlue devices]; | 170 NSArray* devices = [AVCaptureDevice devices]; |
172 CrAVCaptureDevice* device = nil; | 171 AVCaptureDevice* device = nil; |
173 for (device in devices) { | 172 for (device in devices) { |
174 if ([[device uniqueID] UTF8String] == descriptor.device_id) | 173 if ([[device uniqueID] UTF8String] == descriptor.device_id) |
175 break; | 174 break; |
176 } | 175 } |
177 if (device == nil) | 176 if (device == nil) |
178 return; | 177 return; |
179 for (CrAVCaptureDeviceFormat* format in device.formats) { | 178 for (AVCaptureDeviceFormat* format in device.formats) { |
180 // MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType | 179 // MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType |
181 // as well according to CMFormatDescription.h | 180 // as well according to CMFormatDescription.h |
182 const media::VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat( | 181 const media::VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat( |
183 CoreMediaGlue::CMFormatDescriptionGetMediaSubType( | 182 CMFormatDescriptionGetMediaSubType([format formatDescription])); |
184 [format formatDescription])); | |
185 | 183 |
186 CoreMediaGlue::CMVideoDimensions dimensions = | 184 CMVideoDimensions dimensions = |
187 CoreMediaGlue::CMVideoFormatDescriptionGetDimensions( | 185 CMVideoFormatDescriptionGetDimensions([format formatDescription]); |
188 [format formatDescription]); | |
189 | 186 |
190 for (CrAVFrameRateRange* frameRate in | 187 for (AVFrameRateRange* frameRate in |
191 [format videoSupportedFrameRateRanges]) { | 188 [format videoSupportedFrameRateRanges]) { |
192 media::VideoCaptureFormat format( | 189 media::VideoCaptureFormat format( |
193 gfx::Size(dimensions.width, dimensions.height), | 190 gfx::Size(dimensions.width, dimensions.height), |
194 frameRate.maxFrameRate, pixelFormat); | 191 frameRate.maxFrameRate, pixelFormat); |
195 formats->push_back(format); | 192 formats->push_back(format); |
196 DVLOG(2) << descriptor.display_name << " " | 193 DVLOG(2) << descriptor.display_name << " " |
197 << media::VideoCaptureFormat::ToString(format); | 194 << media::VideoCaptureFormat::ToString(format); |
198 } | 195 } |
199 } | 196 } |
200 } | 197 } |
201 | 198 |
202 #pragma mark Public methods | 199 #pragma mark Public methods |
203 | 200 |
204 - (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver { | 201 - (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver { |
205 if ((self = [super init])) { | 202 if ((self = [super init])) { |
206 DCHECK(main_thread_checker_.CalledOnValidThread()); | 203 DCHECK(main_thread_checker_.CalledOnValidThread()); |
207 DCHECK(frameReceiver); | 204 DCHECK(frameReceiver); |
208 [self setFrameReceiver:frameReceiver]; | 205 [self setFrameReceiver:frameReceiver]; |
209 captureSession_.reset( | 206 captureSession_.reset([[AVCaptureSession alloc] init]); |
210 [[AVFoundationGlue::AVCaptureSessionClass() alloc] init]); | |
211 } | 207 } |
212 return self; | 208 return self; |
213 } | 209 } |
214 | 210 |
215 - (void)dealloc { | 211 - (void)dealloc { |
216 [self stopCapture]; | 212 [self stopCapture]; |
217 [super dealloc]; | 213 [super dealloc]; |
218 } | 214 } |
219 | 215 |
220 - (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver { | 216 - (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver { |
(...skipping 14 matching lines...)
235 [captureSession_ removeOutput:stillImageOutput_]; | 231 [captureSession_ removeOutput:stillImageOutput_]; |
236 if (captureDeviceInput_) { | 232 if (captureDeviceInput_) { |
237 [captureSession_ removeInput:captureDeviceInput_]; | 233 [captureSession_ removeInput:captureDeviceInput_]; |
238 // No need to release |captureDeviceInput_|; it is owned by the session. | 234 // No need to release |captureDeviceInput_|; it is owned by the session. |
239 captureDeviceInput_ = nil; | 235 captureDeviceInput_ = nil; |
240 } | 236 } |
241 return YES; | 237 return YES; |
242 } | 238 } |
243 | 239 |
244 // Look for input device with requested name. | 240 // Look for input device with requested name. |
245 captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId]; | 241 captureDevice_ = [AVCaptureDevice deviceWithUniqueID:deviceId]; |
246 if (!captureDevice_) { | 242 if (!captureDevice_) { |
247 [self | 243 [self |
248 sendErrorString:[NSString stringWithUTF8String: | 244 sendErrorString:[NSString stringWithUTF8String: |
249 "Could not open video capture device."]]; | 245 "Could not open video capture device."]]; |
250 return NO; | 246 return NO; |
251 } | 247 } |
252 | 248 |
253 // Create the capture input associated with the device. Easy peasy. | 249 // Create the capture input associated with the device. Easy peasy. |
254 NSError* error = nil; | 250 NSError* error = nil; |
255 captureDeviceInput_ = | 251 captureDeviceInput_ = |
256 [AVCaptureDeviceInputGlue deviceInputWithDevice:captureDevice_ | 252 [AVCaptureDeviceInput deviceInputWithDevice:captureDevice_ error:&error]; |
257 error:&error]; | |
258 if (!captureDeviceInput_) { | 253 if (!captureDeviceInput_) { |
259 captureDevice_ = nil; | 254 captureDevice_ = nil; |
260 [self sendErrorString: | 255 [self sendErrorString: |
261 [NSString stringWithFormat: | 256 [NSString stringWithFormat: |
262 @"Could not create video capture input (%@): %@", | 257 @"Could not create video capture input (%@): %@", |
263 [error localizedDescription], | 258 [error localizedDescription], |
264 [error localizedFailureReason]]]; | 259 [error localizedFailureReason]]]; |
265 return NO; | 260 return NO; |
266 } | 261 } |
267 [captureSession_ addInput:captureDeviceInput_]; | 262 [captureSession_ addInput:captureDeviceInput_]; |
268 | 263 |
269 // Create a new data output for video. The data output is configured to | 264 // Create a new data output for video. The data output is configured to |
270 // discard late frames by default. | 265 // discard late frames by default. |
271 captureVideoDataOutput_.reset( | 266 captureVideoDataOutput_.reset([[AVCaptureVideoDataOutput alloc] init]); |
272 [[AVFoundationGlue::AVCaptureVideoDataOutputClass() alloc] init]); | |
273 if (!captureVideoDataOutput_) { | 267 if (!captureVideoDataOutput_) { |
274 [captureSession_ removeInput:captureDeviceInput_]; | 268 [captureSession_ removeInput:captureDeviceInput_]; |
275 [self sendErrorString:[NSString stringWithUTF8String: | 269 [self sendErrorString:[NSString stringWithUTF8String: |
276 "Could not create video data output."]]; | 270 "Could not create video data output."]]; |
277 return NO; | 271 return NO; |
278 } | 272 } |
279 [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true]; | 273 [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true]; |
280 [captureVideoDataOutput_ | 274 [captureVideoDataOutput_ |
281 setSampleBufferDelegate:self | 275 setSampleBufferDelegate:self |
282 queue:dispatch_get_global_queue( | 276 queue:dispatch_get_global_queue( |
283 DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)]; | 277 DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)]; |
284 [captureSession_ addOutput:captureVideoDataOutput_]; | 278 [captureSession_ addOutput:captureVideoDataOutput_]; |
285 | 279 |
286 // Create and plug the still image capture output. This should happen in | 280 // Create and plug the still image capture output. This should happen in |
287 // advance of the actual picture to allow the 3A to stabilize. | 281 // advance of the actual picture to allow the 3A to stabilize. |
288 stillImageOutput_.reset( | 282 stillImageOutput_.reset([[AVCaptureStillImageOutput alloc] init]); |
289 [[AVFoundationGlue::AVCaptureStillImageOutputClass() alloc] init]); | |
290 if (stillImageOutput_ && [captureSession_ canAddOutput:stillImageOutput_]) | 283 if (stillImageOutput_ && [captureSession_ canAddOutput:stillImageOutput_]) |
291 [captureSession_ addOutput:stillImageOutput_]; | 284 [captureSession_ addOutput:stillImageOutput_]; |
292 | 285 |
293 return YES; | 286 return YES; |
294 } | 287 } |
295 | 288 |
296 - (BOOL)setCaptureHeight:(int)height | 289 - (BOOL)setCaptureHeight:(int)height |
297 width:(int)width | 290 width:(int)width |
298 frameRate:(float)frameRate { | 291 frameRate:(float)frameRate { |
299 DCHECK(![captureSession_ isRunning] && | 292 DCHECK(![captureSession_ isRunning] && |
300 main_thread_checker_.CalledOnValidThread()); | 293 main_thread_checker_.CalledOnValidThread()); |
301 | 294 |
302 frameWidth_ = width; | 295 frameWidth_ = width; |
303 frameHeight_ = height; | 296 frameHeight_ = height; |
304 frameRate_ = frameRate; | 297 frameRate_ = frameRate; |
305 | 298 |
306 FourCharCode best_fourcc = kCVPixelFormatType_422YpCbCr8; | 299 FourCharCode best_fourcc = kCVPixelFormatType_422YpCbCr8; |
307 const bool prefer_mjpeg = | 300 const bool prefer_mjpeg = |
308 width > kMjpegWidthThreshold || height > kMjpegHeightThreshold; | 301 width > kMjpegWidthThreshold || height > kMjpegHeightThreshold; |
309 for (CrAVCaptureDeviceFormat* format in captureDevice_.formats) { | 302 for (AVCaptureDeviceFormat* format in captureDevice_.formats) { |
310 const FourCharCode fourcc = | 303 const FourCharCode fourcc = |
311 CoreMediaGlue::CMFormatDescriptionGetMediaSubType( | 304 CMFormatDescriptionGetMediaSubType([format formatDescription]); |
312 [format formatDescription]); | 305 if (prefer_mjpeg && fourcc == kCMVideoCodecType_JPEG_OpenDML) { |
313 if (prefer_mjpeg && | |
314 fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) { | |
315 best_fourcc = fourcc; | 306 best_fourcc = fourcc; |
316 break; | 307 break; |
317 } | 308 } |
318 | 309 |
319 // Compare according to Chromium preference. | 310 // Compare according to Chromium preference. |
320 if (media::VideoCaptureFormat::ComparePixelFormatPreference( | 311 if (media::VideoCaptureFormat::ComparePixelFormatPreference( |
321 FourCCToChromiumPixelFormat(fourcc), | 312 FourCCToChromiumPixelFormat(fourcc), |
322 FourCCToChromiumPixelFormat(best_fourcc))) { | 313 FourCCToChromiumPixelFormat(best_fourcc))) { |
323 best_fourcc = fourcc; | 314 best_fourcc = fourcc; |
324 } | 315 } |
325 } | 316 } |
326 | 317 |
327 if (best_fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) { | 318 if (best_fourcc == kCMVideoCodecType_JPEG_OpenDML) { |
328 [captureSession_ removeOutput:stillImageOutput_]; | 319 [captureSession_ removeOutput:stillImageOutput_]; |
329 stillImageOutput_.reset(); | 320 stillImageOutput_.reset(); |
330 } | 321 } |
331 | 322 |
332 // The capture output has to be configured, despite Mac documentation | 323 // The capture output has to be configured, despite Mac documentation |
333 // detailing that setting the sessionPreset would be enough. The reason for | 324 // detailing that setting the sessionPreset would be enough. The reason for |
334 // this mismatch is probably because most of the AVFoundation docs are written | 325 // this mismatch is probably because most of the AVFoundation docs are written |
335 // for iOS and not for Mac OS X. AVVideoScalingModeKey() refers to letterboxing | 326 // for iOS and not for Mac OS X. AVVideoScalingModeKey refers to letterboxing |
336 // yes/no and preserve aspect ratio yes/no when scaling. Currently we set | 327 // yes/no and preserve aspect ratio yes/no when scaling. Currently we set |
337 // cropping and preservation. | 328 // cropping and preservation. |
338 NSDictionary* videoSettingsDictionary = @{ | 329 NSDictionary* videoSettingsDictionary = @{ |
339 (id) kCVPixelBufferWidthKey : @(width), (id) | 330 (id)kCVPixelBufferWidthKey : @(width), |
340 kCVPixelBufferHeightKey : @(height), (id) | 331 (id)kCVPixelBufferHeightKey : @(height), |
341 kCVPixelBufferPixelFormatTypeKey : @(best_fourcc), | 332 (id)kCVPixelBufferPixelFormatTypeKey : @(best_fourcc), |
342 AVFoundationGlue::AVVideoScalingModeKey() : | 333 AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill |
343 AVFoundationGlue::AVVideoScalingModeResizeAspectFill() | |
344 }; | 334 }; |
345 [captureVideoDataOutput_ setVideoSettings:videoSettingsDictionary]; | 335 [captureVideoDataOutput_ setVideoSettings:videoSettingsDictionary]; |
346 | 336 |
347 CrAVCaptureConnection* captureConnection = [captureVideoDataOutput_ | 337 AVCaptureConnection* captureConnection = |
348 connectionWithMediaType:AVFoundationGlue::AVMediaTypeVideo()]; | 338 [captureVideoDataOutput_ connectionWithMediaType:AVMediaTypeVideo]; |
349 // Check selector existence, related to bugs http://crbug.com/327532 and | 339 // Check selector existence, related to bugs http://crbug.com/327532 and |
350 // http://crbug.com/328096. | 340 // http://crbug.com/328096. |
351 // CMTimeMake accepts integer arguments but |frameRate| is a float, so round it. | 341 // CMTimeMake accepts integer arguments but |frameRate| is a float, so round it. |
352 if ([captureConnection | 342 if ([captureConnection |
353 respondsToSelector:@selector(isVideoMinFrameDurationSupported)] && | 343 respondsToSelector:@selector(isVideoMinFrameDurationSupported)] && |
354 [captureConnection isVideoMinFrameDurationSupported]) { | 344 [captureConnection isVideoMinFrameDurationSupported]) { |
355 [captureConnection | 345 [captureConnection |
356 setVideoMinFrameDuration:CoreMediaGlue::CMTimeMake( | 346 setVideoMinFrameDuration:CMTimeMake(media::kFrameRatePrecision, |
357 media::kFrameRatePrecision, | 347 (int)(frameRate * |
358 (int)(frameRate * | 348 media::kFrameRatePrecision))]; |
359 media::kFrameRatePrecision))]; | |
360 } | 349 } |
361 if ([captureConnection | 350 if ([captureConnection |
362 respondsToSelector:@selector(isVideoMaxFrameDurationSupported)] && | 351 respondsToSelector:@selector(isVideoMaxFrameDurationSupported)] && |
363 [captureConnection isVideoMaxFrameDurationSupported]) { | 352 [captureConnection isVideoMaxFrameDurationSupported]) { |
364 [captureConnection | 353 [captureConnection |
365 setVideoMaxFrameDuration:CoreMediaGlue::CMTimeMake( | 354 setVideoMaxFrameDuration:CMTimeMake(media::kFrameRatePrecision, |
366 media::kFrameRatePrecision, | 355 (int)(frameRate * |
367 (int)(frameRate * | 356 media::kFrameRatePrecision))]; |
368 media::kFrameRatePrecision))]; | |
369 } | 357 } |
370 return YES; | 358 return YES; |
371 } | 359 } |
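The CMTimeMake calls above express a frame duration of 1/frameRate seconds as an integer value/timescale pair. A worked example, assuming media::kFrameRatePrecision is 10000 (an assumption for illustration; see media/base for the actual constant):

  // Hypothetical helper mirroring the inline arithmetic above. For
  // frame_rate = 29.97: CMTimeMake(10000, 299700), i.e. a duration of
  // 10000/299700 s ~= 1/29.97 s, one frame period.
  CMTime FrameDurationForRate(float frame_rate) {
    return CMTimeMake(
        media::kFrameRatePrecision,
        static_cast<int>(frame_rate * media::kFrameRatePrecision));
  }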
372 | 360 |
373 - (BOOL)startCapture { | 361 - (BOOL)startCapture { |
374 DCHECK(main_thread_checker_.CalledOnValidThread()); | 362 DCHECK(main_thread_checker_.CalledOnValidThread()); |
375 if (!captureSession_) { | 363 if (!captureSession_) { |
376 DLOG(ERROR) << "Video capture session not initialized."; | 364 DLOG(ERROR) << "Video capture session not initialized."; |
377 return NO; | 365 return NO; |
378 } | 366 } |
379 // Connect the notifications. | 367 // Connect the notifications. |
380 NSNotificationCenter* nc = [NSNotificationCenter defaultCenter]; | 368 NSNotificationCenter* nc = [NSNotificationCenter defaultCenter]; |
381 [nc addObserver:self | 369 [nc addObserver:self |
382 selector:@selector(onVideoError:) | 370 selector:@selector(onVideoError:) |
383 name:AVFoundationGlue::AVCaptureSessionRuntimeErrorNotification() | 371 name:AVCaptureSessionRuntimeErrorNotification |
384 object:captureSession_]; | 372 object:captureSession_]; |
385 [captureSession_ startRunning]; | 373 [captureSession_ startRunning]; |
386 return YES; | 374 return YES; |
387 } | 375 } |
388 | 376 |
389 - (void)stopCapture { | 377 - (void)stopCapture { |
390 DCHECK(main_thread_checker_.CalledOnValidThread()); | 378 DCHECK(main_thread_checker_.CalledOnValidThread()); |
391 if ([captureSession_ isRunning]) | 379 if ([captureSession_ isRunning]) |
392 [captureSession_ stopRunning]; // Synchronous. | 380 [captureSession_ stopRunning]; // Synchronous. |
393 [[NSNotificationCenter defaultCenter] removeObserver:self]; | 381 [[NSNotificationCenter defaultCenter] removeObserver:self]; |
394 } | 382 } |
395 | 383 |
396 - (void)takePhoto { | 384 - (void)takePhoto { |
397 DCHECK(main_thread_checker_.CalledOnValidThread()); | 385 DCHECK(main_thread_checker_.CalledOnValidThread()); |
398 DCHECK([captureSession_ isRunning]); | 386 DCHECK([captureSession_ isRunning]); |
399 if (!stillImageOutput_) | 387 if (!stillImageOutput_) |
400 return; | 388 return; |
401 | 389 |
402 DCHECK_EQ(1u, [[stillImageOutput_ connections] count]); | 390 DCHECK_EQ(1u, [[stillImageOutput_ connections] count]); |
403 CrAVCaptureConnection* const connection = | 391 AVCaptureConnection* const connection = |
404 [[stillImageOutput_ connections] firstObject]; | 392 [[stillImageOutput_ connections] firstObject]; |
405 if (!connection) { | 393 if (!connection) { |
406 base::AutoLock lock(lock_); | 394 base::AutoLock lock(lock_); |
407 frameReceiver_->OnPhotoError(); | 395 frameReceiver_->OnPhotoError(); |
408 return; | 396 return; |
409 } | 397 } |
410 | 398 |
411 const auto handler = ^(CoreMediaGlue::CMSampleBufferRef sampleBuffer, | 399 const auto handler = ^(CMSampleBufferRef sampleBuffer, NSError* error) { |
412 NSError* error) { | |
413 base::AutoLock lock(lock_); | 400 base::AutoLock lock(lock_); |
414 if (!frameReceiver_) | 401 if (!frameReceiver_) |
415 return; | 402 return; |
416 if (error != nil) { | 403 if (error != nil) { |
417 frameReceiver_->OnPhotoError(); | 404 frameReceiver_->OnPhotoError(); |
418 return; | 405 return; |
419 } | 406 } |
420 | 407 |
421 // The recommended compressed pixel format is JPEG, so we don't expect surprises. | 408 // The recommended compressed pixel format is JPEG, so we don't expect surprises. |
422 // TODO(mcasas): Consider using [1] for merging EXIF output information: | 409 // TODO(mcasas): Consider using [1] for merging EXIF output information: |
423 // [1] +(NSData*)jpegStillImageNSDataRepresentation:jpegSampleBuffer; | 410 // [1] +(NSData*)jpegStillImageNSDataRepresentation:jpegSampleBuffer; |
424 DCHECK_EQ( | 411 DCHECK_EQ(kCMVideoCodecType_JPEG, |
425 CoreMediaGlue::kCMVideoCodecType_JPEG, | 412 CMFormatDescriptionGetMediaSubType( |
426 CoreMediaGlue::CMFormatDescriptionGetMediaSubType( | 413 CMSampleBufferGetFormatDescription(sampleBuffer))); |
427 CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer))); | |
428 | 414 |
429 char* baseAddress = 0; | 415 char* baseAddress = 0; |
430 size_t length = 0; | 416 size_t length = 0; |
431 ExtractBaseAddressAndLength(&baseAddress, &length, sampleBuffer); | 417 ExtractBaseAddressAndLength(&baseAddress, &length, sampleBuffer); |
432 frameReceiver_->OnPhotoTaken(reinterpret_cast<uint8_t*>(baseAddress), | 418 frameReceiver_->OnPhotoTaken(reinterpret_cast<uint8_t*>(baseAddress), |
433 length, "image/jpeg"); | 419 length, "image/jpeg"); |
434 }; | 420 }; |
435 | 421 |
436 [stillImageOutput_ captureStillImageAsynchronouslyFromConnection:connection | 422 [stillImageOutput_ captureStillImageAsynchronouslyFromConnection:connection |
437 completionHandler:handler]; | 423 completionHandler:handler]; |
438 } | 424 } |
439 | 425 |
440 #pragma mark Private methods | 426 #pragma mark Private methods |
441 | 427 |
442 // |captureOutput| is called by the capture device to deliver a new frame. | 428 // |captureOutput| is called by the capture device to deliver a new frame. |
443 // AVFoundation calls from a number of threads, depending on, at least, | 429 // AVFoundation calls from a number of threads, depending on, at least, |
444 // whether Chrome is in the foreground or background. | 430 // whether Chrome is in the foreground or background. |
445 - (void)captureOutput:(CrAVCaptureOutput*)captureOutput | 431 - (void)captureOutput:(AVCaptureOutput*)captureOutput |
446 didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer | 432 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
447 fromConnection:(CrAVCaptureConnection*)connection { | 433 fromConnection:(AVCaptureConnection*)connection { |
448 const CoreMediaGlue::CMFormatDescriptionRef formatDescription = | 434 const CMFormatDescriptionRef formatDescription = |
449 CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer); | 435 CMSampleBufferGetFormatDescription(sampleBuffer); |
450 const FourCharCode fourcc = | 436 const FourCharCode fourcc = |
451 CoreMediaGlue::CMFormatDescriptionGetMediaSubType(formatDescription); | 437 CMFormatDescriptionGetMediaSubType(formatDescription); |
452 const CoreMediaGlue::CMVideoDimensions dimensions = | 438 const CMVideoDimensions dimensions = |
453 CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(formatDescription); | 439 CMVideoFormatDescriptionGetDimensions(formatDescription); |
454 const media::VideoCaptureFormat captureFormat( | 440 const media::VideoCaptureFormat captureFormat( |
455 gfx::Size(dimensions.width, dimensions.height), frameRate_, | 441 gfx::Size(dimensions.width, dimensions.height), frameRate_, |
456 FourCCToChromiumPixelFormat(fourcc)); | 442 FourCCToChromiumPixelFormat(fourcc)); |
457 | 443 |
458 char* baseAddress = 0; | 444 char* baseAddress = 0; |
459 size_t frameSize = 0; | 445 size_t frameSize = 0; |
460 CVImageBufferRef videoFrame = nil; | 446 CVImageBufferRef videoFrame = nil; |
461 if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) { | 447 if (fourcc == kCMVideoCodecType_JPEG_OpenDML) { |
462 ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer); | 448 ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer); |
463 } else { | 449 } else { |
464 videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer); | 450 videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer); |
465 // Lock the frame and calculate frame size. | 451 // Lock the frame and calculate frame size. |
466 if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) == | 452 if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) == |
467 kCVReturnSuccess) { | 453 kCVReturnSuccess) { |
468 baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame)); | 454 baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame)); |
469 frameSize = CVPixelBufferGetHeight(videoFrame) * | 455 frameSize = CVPixelBufferGetHeight(videoFrame) * |
470 CVPixelBufferGetBytesPerRow(videoFrame); | 456 CVPixelBufferGetBytesPerRow(videoFrame); |
471 } else { | 457 } else { |
472 videoFrame = nil; | 458 videoFrame = nil; |
473 } | 459 } |
474 } | 460 } |
475 | 461 |
476 { | 462 { |
477 base::AutoLock lock(lock_); | 463 base::AutoLock lock(lock_); |
478 const CoreMediaGlue::CMTime cm_timestamp = | 464 const CMTime cm_timestamp = |
479 CoreMediaGlue::CMSampleBufferGetPresentationTimeStamp(sampleBuffer); | 465 CMSampleBufferGetPresentationTimeStamp(sampleBuffer); |
480 const base::TimeDelta timestamp = | 466 const base::TimeDelta timestamp = |
481 CMTIME_IS_VALID(cm_timestamp) | 467 CMTIME_IS_VALID(cm_timestamp) |
482 ? base::TimeDelta::FromMicroseconds( | 468 ? base::TimeDelta::FromMicroseconds( |
483 cm_timestamp.value * base::TimeTicks::kMicrosecondsPerSecond / | 469 cm_timestamp.value * base::TimeTicks::kMicrosecondsPerSecond / |
484 cm_timestamp.timescale) | 470 cm_timestamp.timescale) |
485 : media::kNoTimestamp; | 471 : media::kNoTimestamp; |
486 | 472 |
487 if (frameReceiver_ && baseAddress) { | 473 if (frameReceiver_ && baseAddress) { |
488 frameReceiver_->ReceiveFrame(reinterpret_cast<uint8_t*>(baseAddress), | 474 frameReceiver_->ReceiveFrame(reinterpret_cast<uint8_t*>(baseAddress), |
489 frameSize, captureFormat, 0, 0, timestamp); | 475 frameSize, captureFormat, 0, 0, timestamp); |
490 } | 476 } |
491 } | 477 } |
492 | 478 |
493 if (videoFrame) | 479 if (videoFrame) |
494 CVPixelBufferUnlockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly); | 480 CVPixelBufferUnlockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly); |
495 } | 481 } |
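The timestamp handling above turns a CMTime (a rational value/timescale pair) into microseconds. A minimal sketch of the same arithmetic as a standalone helper (hypothetical, mirroring the inline code):

  // CMTime{value: 3000, timescale: 600} is 5 s: 3000 * 1000000 / 600 = 5000000 us.
  base::TimeDelta CMTimeToTimeDelta(const CMTime& cm_timestamp) {
    DCHECK(CMTIME_IS_VALID(cm_timestamp));
    return base::TimeDelta::FromMicroseconds(
        cm_timestamp.value * base::TimeTicks::kMicrosecondsPerSecond /
        cm_timestamp.timescale);
  }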
496 | 482 |
497 - (void)onVideoError:(NSNotification*)errorNotification { | 483 - (void)onVideoError:(NSNotification*)errorNotification { |
498 NSError* error = base::mac::ObjCCast<NSError>([[errorNotification userInfo] | 484 NSError* error = base::mac::ObjCCast<NSError>( |
499 objectForKey:AVFoundationGlue::AVCaptureSessionErrorKey()]); | 485 [[errorNotification userInfo] objectForKey:AVCaptureSessionErrorKey]); |
500 [self sendErrorString:[NSString | 486 [self sendErrorString:[NSString |
501 stringWithFormat:@"%@: %@", | 487 stringWithFormat:@"%@: %@", |
502 [error localizedDescription], | 488 [error localizedDescription], |
503 [error localizedFailureReason]]]; | 489 [error localizedFailureReason]]]; |
504 } | 490 } |
505 | 491 |
506 - (void)sendErrorString:(NSString*)error { | 492 - (void)sendErrorString:(NSString*)error { |
507 DLOG(ERROR) << [error UTF8String]; | 493 DLOG(ERROR) << [error UTF8String]; |
508 base::AutoLock lock(lock_); | 494 base::AutoLock lock(lock_); |
509 if (frameReceiver_) | 495 if (frameReceiver_) |
510 frameReceiver_->ReceiveError(FROM_HERE, [error UTF8String]); | 496 frameReceiver_->ReceiveError(FROM_HERE, [error UTF8String]); |
511 } | 497 } |
512 | 498 |
513 @end | 499 @end |