OLD | NEW |
| (Empty) |
1 /* | |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | |
3 * | |
4 * Use of this source code is governed by a BSD-style license | |
5 * that can be found in the LICENSE file in the root of the source | |
6 * tree. An additional intellectual property rights grant can be found | |
7 * in the file PATENTS. All contributing project authors may | |
8 * be found in the AUTHORS file in the root of the source tree. | |
9 */ | |
10 | |
11 #include "avfoundationvideocapturer.h" | |
12 | |
13 #import <AVFoundation/AVFoundation.h> | |
14 | |
15 #import "RTCAVFoundationVideoCapturerInternal.h" | |
16 #import "RTCDispatcher+Private.h" | |
17 #import "WebRTC/RTCLogging.h" | |
18 #import "WebRTC/RTCVideoFrameBuffer.h" | |
19 | |
20 #include "avfoundationformatmapper.h" | |
21 | |
22 #include "webrtc/api/video/video_rotation.h" | |
23 #include "webrtc/rtc_base/bind.h" | |
24 #include "webrtc/rtc_base/checks.h" | |
25 #include "webrtc/rtc_base/logging.h" | |
26 #include "webrtc/rtc_base/thread.h" | |
27 #include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h" | |
28 | |
29 namespace webrtc { | |
30 | |
// Message ids for cross-thread posts from the capture callback.
// NOTE(review): no reference to kMessageTypeFrame is visible in this file —
// presumably consumed elsewhere (or vestigial); confirm before removing.
enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};
34 | |
// Builds the capturer and advertises only the video formats available on
// every physical camera, so a format negotiated up-front keeps working after
// a front/back camera switch.
AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];

  std::set<cricket::VideoFormat> front_formats =
      GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]);
  std::set<cricket::VideoFormat> back_formats =
      GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]);

  std::vector<cricket::VideoFormat> supported_formats;
  if (back_formats.empty()) {
    // No usable back camera: expose everything the front camera offers.
    supported_formats.assign(front_formats.begin(), front_formats.end());
  } else if (front_formats.empty()) {
    // No usable front camera: expose everything the back camera offers.
    supported_formats.assign(back_formats.begin(), back_formats.end());
  } else {
    // Both cameras present: keep only the common formats. std::set iterates
    // in sorted order, satisfying set_intersection's sorted-range contract.
    std::set_intersection(front_formats.begin(), front_formats.end(),
                          back_formats.begin(), back_formats.end(),
                          std::back_inserter(supported_formats));
  }
  SetSupportedFormats(supported_formats);
}
59 | |
AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  // Drop the strong reference; under ARC this releases the ObjC capturer
  // (and whatever it tears down in its own dealloc).
  _capturer = nil;
}
63 | |
// Starts capturing with the requested format.
// Returns CS_STARTING on success (the session spins up asynchronously),
// CS_FAILED if the capturer is missing, already running, or the device
// rejects the format.
cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }

  AVCaptureDevice* device = [_capturer getActiveCaptureDevice];
  AVCaptureSession* session = _capturer.captureSession;

  // Apply the requested format to the active device before starting; bail
  // out without touching capture state if the device can't honor it.
  if (!SetFormatForCaptureDevice(device, session, format)) {
    return cricket::CaptureState::CS_FAILED;
  }

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the AVCaptureSession
  // to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  // State is set to CS_RUNNING optimistically (see TODO above), while the
  // return value reports CS_STARTING to the caller.
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}
91 | |
// Stops the capture session and clears the active capture format.
void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
}
96 | |
// True while the underlying AVCaptureSession is running.
bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}
100 | |
// Exposes the capturer's AVCaptureSession, e.g. for preview layers.
AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}
104 | |
// Whether the device has a back camera the capturer can switch to.
bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}
108 | |
// Switches between front (false) and back (true) camera.
void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}
112 | |
// True when the back camera is the active capture device.
bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}
116 | |
// Requests that delivered frames not exceed width x height at fps; the video
// adapter crops/scales/drops incoming frames to satisfy the request.
void AVFoundationVideoCapturer::AdaptOutputFormat(int width, int height,
                                                  int fps) {
  const cricket::VideoFormat requested_format(
      width, height, cricket::VideoFormat::FpsToInterval(fps), 0);
  video_adapter()->OnOutputFormatRequest(requested_format);
}
121 | |
// Delivers one captured CMSampleBuffer to the base class as a VideoFrame.
// Invalid/partial buffers and frames the adapter decides to drop are
// silently discarded.
void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sample_buffer, VideoRotation rotation) {
  // Only complete, ready buffers carrying exactly one sample are usable.
  if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
      !CMSampleBufferIsValid(sample_buffer) ||
      !CMSampleBufferDataIsReady(sample_buffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
  if (image_buffer == NULL) {
    return;
  }

  int captured_width = CVPixelBufferGetWidth(image_buffer);
  int captured_height = CVPixelBufferGetHeight(image_buffer);

  // Out-params filled by AdaptFrame; it decides whether to keep the frame,
  // how to crop/scale it, and translates the timestamp to the rtc clock.
  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;
  if (!AdaptFrame(captured_width, captured_height,
                  rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    return;  // Adapter dropped this frame.
  }

  // Wrap the pixel buffer without copying; cropping/scaling parameters are
  // carried alongside and applied lazily by the buffer implementation.
  RTCCVPixelBuffer* pixel_buffer =
      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:image_buffer
                                       adaptedWidth:adapted_width
                                      adaptedHeight:adapted_height
                                          cropWidth:crop_width
                                         cropHeight:crop_height
                                              cropX:crop_x
                                              cropY:crop_y];
  rtc::scoped_refptr<VideoFrameBuffer> frame_buffer =
      new rtc::RefCountedObject<ObjCFrameBuffer>(pixel_buffer);

  // Applying rotation is only supported for legacy reasons and performance is
  // not critical here.
  if (apply_rotation() && rotation != kVideoRotation_0) {
    frame_buffer = I420Buffer::Rotate(*frame_buffer->ToI420(), rotation);
    if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
      std::swap(captured_width, captured_height);
    }
    rotation = kVideoRotation_0;
  }

  OnFrame(webrtc::VideoFrame(frame_buffer, rotation, translated_camera_time_us),
          captured_width, captured_height);
}
178 | |
179 } // namespace webrtc | |
OLD | NEW |