/**
 * Copyright 2017 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCFileVideoCapturer.h"

// AVAssetReader and CACurrentMediaTime are used below; import their frameworks
// explicitly in case the header does not pull them in transitively.
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>

#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrameBuffer.h"

@implementation RTCFileVideoCapturer {
  AVAssetReader *_reader;
  AVAssetReaderTrackOutput *_outTrack;
  BOOL _capturerStopped;
  CMTime _lastPresentationTime;
  dispatch_queue_t _frameQueue;
}

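// Starts reading frames from the named file in the app's main bundle on a
// background queue. Fails early if a previous capture session is still
// reading or if the file cannot be found.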
- (void)startCapturingFromFileNamed:(NSString *)nameOfFile {
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    if (_reader && _reader.status == AVAssetReaderStatusReading) {
      RTCLog(@"Capturer exists and is already reading another file. Start capture request failed.");
      return;
    }
    NSString *pathForFile = [self pathForFileName:nameOfFile];
    if (!pathForFile) {
      RTCLog(@"File %@ not found in bundle", nameOfFile);
      return;
    }

    // Reset the stop flag so the capturer can be restarted after stopCapture.
    _capturerStopped = NO;
    // kCMTimeZero is a valid zero time; CMTimeMake(0, 0) has an invalid timescale.
    _lastPresentationTime = kCMTimeZero;

    NSURL *URLForFile = [NSURL fileURLWithPath:pathForFile];
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:URLForFile options:nil];

    NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    if (error) {
      RTCLog(@"File reader failed with error: %@", error);
      return;
    }

    // Decode to NV12 (bi-planar 4:2:0), the pixel format RTCCVPixelBuffer wraps.
    NSDictionary *options = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    _outTrack = [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject
                                                 outputSettings:options];
    [_reader addOutput:_outTrack];

    [_reader startReading];
    RTCLog(@"File capturer started reading");
    [self readNextBuffer];
  });
}

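// Flags the capturer as stopped; the read loop observes the flag and tears
// down the reader on its next iteration.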
- (void)stopCapture {
  _capturerStopped = YES;
  RTCLog(@"File capturer stopped.");
}

#pragma mark - Private

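// Resolves a "name.extension" file name to its full path in the main bundle.
// Returns nil if the name does not split into exactly a base name and an
// extension, or if the bundle does not contain the file.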
- (nullable NSString *)pathForFileName:(NSString *)fileName {
  NSArray *nameComponents = [fileName componentsSeparatedByString:@"."];
  if (nameComponents.count != 2) {
    return nil;
  }

  NSString *path =
      [[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
  return path;
}

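// Lazily creates the serial queue on which the frame-pacing timers fire,
// targeted at the background-priority global queue.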
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue = dispatch_queue_create("org.webrtc.filecapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(_frameQueue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
  }
  return _frameQueue;
}

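// Pulls the next sample buffer from the track output. Tears the reader down
// once it stops reading or the capturer is stopped; skips buffers that are
// invalid, not ready, or do not contain exactly one sample.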
- (void)readNextBuffer {
  if (_reader.status != AVAssetReaderStatusReading || _capturerStopped) {
    [_reader cancelReading];
    _reader = nil;
    return;
  }

  CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
  if (!sampleBuffer) {
    // End of file: the reader's status is no longer "reading", so the next
    // call cleans up and returns.
    [self readNextBuffer];
    return;
  }
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    // Drop unusable buffers; the copy must still be released to avoid a leak.
    CFRelease(sampleBuffer);
    [self readNextBuffer];
    return;
  }

  [self publishSampleBuffer:sampleBuffer];
}

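// Schedules delivery of the sample buffer on a one-shot strict timer so that
// frames are emitted at the file's native frame rate (the gap between
// consecutive presentation timestamps), then queues the next read.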
- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
  CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
  Float64 presentationDifference =
      CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
  _lastPresentationTime = presentationTime;
  // Use llround (not lroundf): rounding through a float would mangle
  // nanosecond-scale values.
  int64_t presentationDifferenceRound = llround(presentationDifference * NSEC_PER_SEC);

  __block dispatch_source_t timer = [self createStrictTimer];
  // Strict timer that will fire |presentationDifferenceRound| ns from now and never again.
  dispatch_source_set_timer(timer,
                            dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
                            DISPATCH_TIME_FOREVER,
                            0);
  dispatch_source_set_event_handler(timer, ^{
    dispatch_source_cancel(timer);
    timer = nil;

    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!pixelBuffer) {
      CFRelease(sampleBuffer);
      dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self readNextBuffer];
      });
      return;
    }

    RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
    NSTimeInterval timeStampSeconds = CACurrentMediaTime();
    int64_t timeStampNs = llround(timeStampSeconds * NSEC_PER_SEC);
    RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                             rotation:RTCVideoRotation_0
                                                          timeStampNs:timeStampNs];
    CFRelease(sampleBuffer);

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
      [self readNextBuffer];
    });

    [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
  });
  dispatch_activate(timer);
}

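// Creates (but does not arm) a strict one-shot timer source on the frame
// queue; the caller sets its deadline and handler before activating it.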
- (dispatch_source_t)createStrictTimer {
  dispatch_source_t timer = dispatch_source_create(
      DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
  return timer;
}

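// Ensure the read loop stops delivering frames once the capturer goes away.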
- (void)dealloc {
  [self stopCapture];
}

@end
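
/*
 * Usage sketch (illustrative, not part of this file): feeding decoded frames
 * from a bundled clip into an RTCVideoSource, which conforms to
 * RTCVideoCapturerDelegate. `factory` (an RTCPeerConnectionFactory) and the
 * file name "foreman.mp4" are assumptions for the example.
 *
 *   RTCVideoSource *videoSource = [factory videoSource];
 *   RTCFileVideoCapturer *capturer =
 *       [[RTCFileVideoCapturer alloc] initWithDelegate:videoSource];
 *   [capturer startCapturingFromFileNamed:@"foreman.mp4"];
 *   // ... later, when the clip is no longer needed:
 *   [capturer stopCapture];
 */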