| OLD | NEW |
| (Empty) |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #import "media/capture/video/mac/video_capture_device_qtkit_mac.h" | |
| 6 | |
| 7 #import <QTKit/QTKit.h> | |
| 8 #include <stddef.h> | |
| 9 | |
| 10 #include "base/debug/crash_logging.h" | |
| 11 #include "base/location.h" | |
| 12 #include "base/logging.h" | |
| 13 #include "media/base/timestamp_constants.h" | |
| 14 #include "media/base/video_capture_types.h" | |
| 15 #include "media/capture/video/mac/video_capture_device_mac.h" | |
| 16 #include "media/capture/video/video_capture_device.h" | |
| 17 #include "ui/gfx/geometry/size.h" | |
| 18 | |
@implementation VideoCaptureDeviceQTKit

#pragma mark Class methods

// Fills |deviceNames| with one entry per usable QTKit video capture device,
// keyed by the device's unique ID. Suspended devices are skipped.
+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // Device enumeration can route through third-party drivers that are known
  // to throw. Swallow any exception and proceed with no devices detected.
  NSArray* captureDevices = nil;
  @try {
    captureDevices =
        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
  } @catch (id exception) {
  }

  for (QTCaptureDevice* device in captureDevices) {
    BOOL isSuspended = [[device
        attributeForKey:QTCaptureDeviceSuspendedAttribute] boolValue];
    if (isSuspended)
      continue;
    DeviceNameAndTransportType* nameAndTransportType =
        [[[DeviceNameAndTransportType alloc]
             initWithName:[device localizedDisplayName]
            transportType:media::kIOAudioDeviceTransportTypeUnknown]
            autorelease];
    [deviceNames setObject:nameAndTransportType forKey:[device uniqueID]];
  }
}
| 43 | |
// Returns a dictionary of available capture devices (unique ID -> name and
// transport type), enumerated on the main thread.
+ (NSDictionary*)deviceNames {
  NSMutableDictionary* names =
      [[[NSMutableDictionary alloc] init] autorelease];

  // TODO(shess): Post to the main thread to see if that helps
  // http://crbug.com/139164
  [self performSelectorOnMainThread:@selector(getDeviceNames:)
                         withObject:names
                      waitUntilDone:YES];
  return names;
}
| 55 | |
#pragma mark Public methods

// Initializes the receiver with |frameReceiver| as the sink for captured
// frames and errors. The receiver may later be swapped or cleared through
// -setFrameReceiver:.
- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  if ((self = [super init]) != nil) {
    lock_ = [[NSLock alloc] init];
    frameReceiver_ = frameReceiver;
  }
  return self;
}
| 66 | |
- (void)dealloc {
  // Release everything retained in -initWithFrameReceiver: and
  // -setCaptureDevice:. |lock_| is allocated in the initializer and must be
  // released here too (it previously leaked).
  [captureSession_ release];
  [captureDeviceInput_ release];
  [lock_ release];
  [super dealloc];
}
| 72 | |
// Atomically swaps the frame receiver. Taking |lock_| guarantees that an
// in-flight -captureOutput:... callback never uses a stale pointer.
- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)receiver {
  [lock_ lock];
  frameReceiver_ = receiver;
  [lock_ unlock];
}
| 78 | |
// Attaches the capture pipeline to the device whose unique ID is |deviceId|,
// or tears the pipeline down when |deviceId| is nil. Returns YES on success;
// on failure an error string is forwarded via -sendErrorString:.
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  if (deviceId) {
    // Set the capture device.
    if (captureDeviceInput_) {
      DLOG(ERROR) << "Video capture device already set.";
      return NO;
    }

    // TODO(mcasas): Consider using [QTCaptureDevice deviceWithUniqueID]
    // instead of explicitly forcing reenumeration of devices.
    NSArray* captureDevices =
        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
    NSArray* captureDevicesNames = [captureDevices valueForKey:@"uniqueID"];
    NSUInteger index = [captureDevicesNames indexOfObject:deviceId];
    if (index == NSNotFound) {
      [self sendErrorString:@"Video capture device not found."];
      return NO;
    }
    QTCaptureDevice* device = [captureDevices objectAtIndex:index];
    if ([[device
            attributeForKey:QTCaptureDeviceSuspendedAttribute] boolValue]) {
      [self sendErrorString:@"Cannot open suspended video capture device."];
      return NO;
    }
    // |error| must be nil-initialized: QTKit is only guaranteed to write it
    // on failure, and the failure paths below format it into a message.
    NSError* error = nil;
    if (![device open:&error]) {
      [self sendErrorString:
                [NSString stringWithFormat:
                              @"Could not open video capture device (%@): %@",
                              [error localizedDescription],
                              [error localizedFailureReason]]];
      return NO;
    }
    captureDeviceInput_ = [[QTCaptureDeviceInput alloc] initWithDevice:device];
    captureSession_ = [[QTCaptureSession alloc] init];

    QTCaptureDecompressedVideoOutput* captureDecompressedOutput =
        [[[QTCaptureDecompressedVideoOutput alloc] init] autorelease];
    [captureDecompressedOutput setDelegate:self];
    [captureDecompressedOutput setAutomaticallyDropsLateVideoFrames:YES];
    if (![captureSession_ addOutput:captureDecompressedOutput error:&error]) {
      [self sendErrorString:
                [NSString
                    stringWithFormat:
                        @"Could not connect video capture output (%@): %@",
                        [error localizedDescription],
                        [error localizedFailureReason]]];
      return NO;
    }

    // This key can be used to check if video capture code was related to a
    // particular crash.
    base::debug::SetCrashKeyValue("VideoCaptureDeviceQTKit", "OpenedDevice");

    // Set the video pixel format to 2VUY (a.k.a UYVY, packed 4:2:2).
    NSDictionary* captureDictionary = [NSDictionary
        dictionaryWithObject:
            [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8]
                      forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [captureDecompressedOutput setPixelBufferAttributes:captureDictionary];

    return YES;
  } else {
    // Remove the previously set capture device.
    if (!captureDeviceInput_) {
      // Being here means stopping a device that never started OK in the first
      // place, log it.
      [self sendLogString:@"No video capture device set, on removal."];
      return YES;
    }
    // Tear down input and output, stop the capture and deregister observers.
    [self stopCapture];
    [captureSession_ release];
    captureSession_ = nil;
    [captureDeviceInput_ release];
    captureDeviceInput_ = nil;
    return YES;
  }
}
| 163 | |
// Configures the session's single output for |width| x |height| frames at
// |frameRate| fps. Must be called after -setCaptureDevice: succeeded and
// before the capabilities have been set. Returns YES on success.
- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  if (!captureDeviceInput_) {
    [self sendErrorString:@"No video capture device set."];
    return NO;
  }
  if ([[captureSession_ outputs] count] != 1) {
    [self sendErrorString:@"Video capture capabilities already set."];
    return NO;
  }
  if (frameRate <= 0.0f) {
    [self sendErrorString:@"Wrong frame rate."];
    return NO;
  }

  frameRate_ = frameRate;

  QTCaptureDecompressedVideoOutput* output =
      [[captureSession_ outputs] objectAtIndex:0];

  // Set up desired output properties. The pixel format the output was
  // initially configured with must be maintained, so it is read back from the
  // old capture dictionary; only width and height change here.
  id pixelFormat = [[output pixelBufferAttributes]
      valueForKey:(id)kCVPixelBufferPixelFormatTypeKey];
  NSDictionary* videoSettingsDictionary = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : pixelFormat,
  };
  [output setPixelBufferAttributes:videoSettingsDictionary];

  [output setMinimumVideoFrameInterval:(NSTimeInterval)1 / frameRate];
  return YES;
}
| 201 | |
// Connects the device input to the session (if not already connected),
// registers for runtime-error notifications, and starts the session running.
// Capture capabilities must already have been configured. Returns YES on
// success; failures are reported via -sendErrorString:.
- (BOOL)startCapture {
  if ([[captureSession_ outputs] count] == 0) {
    // Capture properties not set.
    [self sendErrorString:@"Video capture device not initialized."];
    return NO;
  }
  if ([[captureSession_ inputs] count] == 0) {
    // |error| must be nil-initialized: QTKit is only guaranteed to write it
    // on failure, and the failure path below formats it into a message.
    NSError* error = nil;
    if (![captureSession_ addInput:captureDeviceInput_ error:&error]) {
      [self sendErrorString:
                [NSString
                    stringWithFormat:
                        @"Could not connect video capture device (%@): %@",
                        [error localizedDescription],
                        [error localizedFailureReason]]];
      return NO;
    }
    // Observe runtime errors so they can be forwarded to the frame receiver;
    // the observer is removed in -stopCapture.
    NSNotificationCenter* notificationCenter =
        [NSNotificationCenter defaultCenter];
    [notificationCenter addObserver:self
                           selector:@selector(handleNotification:)
                               name:QTCaptureSessionRuntimeErrorNotification
                             object:captureSession_];
    [captureSession_ startRunning];
  }
  return YES;
}
| 232 | |
// Stops capture and deregisters the runtime-error observer. Safe to call even
// if capture never started (messaging an empty session is harmless).
- (void)stopCapture {
  // QTKit achieves thread safety and asynchronous execution by posting messages
  // to the main thread, e.g. -addOutput:. Both -removeOutput: and -removeInput:
  // post a message to the main thread while holding a lock that the
  // notification handler might need. To avoid a deadlock, we perform those
  // tasks in the main thread. See bugs http://crbug.com/152757 and
  // http://crbug.com/399792.
  [self performSelectorOnMainThread:@selector(stopCaptureOnUIThread:)
                         withObject:nil
                      waitUntilDone:YES];
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}
| 245 | |
// Main-thread portion of -stopCapture: detaches the input, stops the session,
// and detaches the output after clearing its delegate so no further
// -captureOutput:... callbacks are delivered. |dummy| is unused; it only
// satisfies the performSelectorOnMainThread: signature.
- (void)stopCaptureOnUIThread:(id)dummy {
  if ([[captureSession_ inputs] count] > 0) {
    DCHECK_EQ([[captureSession_ inputs] count], 1u);
    [captureSession_ removeInput:captureDeviceInput_];
    [captureSession_ stopRunning];
  }
  if ([[captureSession_ outputs] count] > 0) {
    DCHECK_EQ([[captureSession_ outputs] count], 1u);
    id output = [[captureSession_ outputs] objectAtIndex:0];
    [output setDelegate:nil];
    [captureSession_ removeOutput:output];
  }
}
| 259 | |
// |captureOutput| is called by the capture device to deliver a new frame.
// The frame is repacked if its rows are padded, stamped with format, pixel
// aspect ratio and timestamp information, and handed to |frameReceiver_|.
- (void)captureOutput:(QTCaptureOutput*)captureOutput
    didOutputVideoFrame:(CVImageBufferRef)videoFrame
       withSampleBuffer:(QTSampleBuffer*)sampleBuffer
         fromConnection:(QTCaptureConnection*)connection {
  // |lock_| guards |frameReceiver_|, which -setFrameReceiver: may swap or
  // clear concurrently. It is held for the whole delivery.
  [lock_ lock];
  if (!frameReceiver_) {
    [lock_ unlock];
    return;
  }

  // Lock the frame and calculate frame size.
  const int kLockFlags = 0;
  if (CVPixelBufferLockBaseAddress(videoFrame, kLockFlags) ==
      kCVReturnSuccess) {
    void* baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    size_t frameWidth = CVPixelBufferGetWidth(videoFrame);
    size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
    size_t frameSize = bytesPerRow * frameHeight;

    // TODO(shess): bytesPerRow may not correspond to frameWidth_*2,
    // but VideoCaptureController::OnIncomingCapturedData() requires
    // it to do so. Plumbing things through is intrusive, for now
    // just deliver an adjusted buffer.
    // TODO(nick): This workaround could probably be eliminated by using
    // VideoCaptureController::OnIncomingCapturedVideoFrame, which supports
    // pitches.
    UInt8* addressToPass = static_cast<UInt8*>(baseAddress);
    // UYVY is 2 bytes per pixel.
    size_t expectedBytesPerRow = frameWidth * 2;
    if (bytesPerRow > expectedBytesPerRow) {
      // TODO(shess): frameHeight and frameHeight_ are not the same,
      // try to do what the surrounding code seems to assume.
      // Ironically, captureCapability and frameSize are ignored
      // anyhow.
      // Copy row by row into a tightly-packed buffer, dropping the padding at
      // the end of each source row. |adjustedFrame_| is reused across frames.
      adjustedFrame_.resize(expectedBytesPerRow * frameHeight);
      // std::vector is contiguous according to standard.
      UInt8* adjustedAddress = &adjustedFrame_[0];

      for (size_t y = 0; y < frameHeight; ++y) {
        memcpy(adjustedAddress + y * expectedBytesPerRow,
               addressToPass + y * bytesPerRow, expectedBytesPerRow);
      }

      addressToPass = adjustedAddress;
      frameSize = frameHeight * expectedBytesPerRow;
    }

    media::VideoCaptureFormat captureFormat(
        gfx::Size(frameWidth, frameHeight), frameRate_,
        media::PIXEL_FORMAT_UYVY);

    // The aspect ratio dictionary is often missing, in which case we report
    // a pixel aspect ratio of 0:0.
    int aspectNumerator = 0, aspectDenominator = 0;
    CFDictionaryRef aspectRatioDict = (CFDictionaryRef)CVBufferGetAttachment(
        videoFrame, kCVImageBufferPixelAspectRatioKey, NULL);
    if (aspectRatioDict) {
      CFNumberRef aspectNumeratorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioHorizontalSpacingKey);
      CFNumberRef aspectDenominatorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioVerticalSpacingKey);
      DCHECK(aspectNumeratorRef && aspectDenominatorRef)
          << "Aspect Ratio dictionary missing its entries.";
      CFNumberGetValue(aspectNumeratorRef, kCFNumberIntType, &aspectNumerator);
      CFNumberGetValue(aspectDenominatorRef, kCFNumberIntType,
                       &aspectDenominator);
    }

    // Deliver the captured video frame.
    // A QTTime that is indefinite or has a zero time scale cannot be
    // converted; deliver media::kNoTimestamp() in that case.
    const QTTime qt_timestamp = [sampleBuffer presentationTime];
    base::TimeDelta timestamp;
    if (!(qt_timestamp.flags & kQTTimeIsIndefinite) && qt_timestamp.timeScale) {
      timestamp = base::TimeDelta::FromMicroseconds(
          qt_timestamp.timeValue * base::TimeTicks::kMicrosecondsPerSecond /
          qt_timestamp.timeScale);
    } else {
      timestamp = media::kNoTimestamp();
    }
    frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
                                 aspectNumerator, aspectDenominator, timestamp);

    CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
  }
  [lock_ unlock];
}
| 347 | |
// Handles QTCaptureSessionRuntimeErrorNotification by forwarding the error's
// description and failure reason to the frame receiver.
- (void)handleNotification:(NSNotification*)errorNotification {
  NSError* error =
      [[errorNotification userInfo] objectForKey:QTCaptureSessionErrorKey];
  NSString* message =
      [NSString stringWithFormat:@"%@: %@", [error localizedDescription],
                                 [error localizedFailureReason]];
  [self sendErrorString:message];
}
| 355 | |
// Logs |error| and, when a frame receiver is attached, reports it as a
// capture error. |lock_| synchronizes with -setFrameReceiver:.
- (void)sendErrorString:(NSString*)error {
  DLOG(ERROR) << [error UTF8String];
  [lock_ lock];
  media::VideoCaptureDeviceMac* receiver = frameReceiver_;
  if (receiver)
    receiver->ReceiveError(FROM_HERE, [error UTF8String]);
  [lock_ unlock];
}
| 363 | |
// Logs |message| and, when a frame receiver is attached, forwards it for
// client-visible logging. |lock_| synchronizes with -setFrameReceiver:.
- (void)sendLogString:(NSString*)message {
  DVLOG(1) << [message UTF8String];
  [lock_ lock];
  media::VideoCaptureDeviceMac* receiver = frameReceiver_;
  if (receiver)
    receiver->LogMessage([message UTF8String]);
  [lock_ unlock];
}

@end
| OLD | NEW |