OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/cast/sender/video_sender.h" | 5 #include "media/cast/sender/video_sender.h" |
6 | 6 |
7 #include <stdint.h> | 7 #include <stdint.h> |
8 #include <algorithm> | 8 #include <algorithm> |
9 #include <cmath> | 9 #include <cmath> |
10 #include <cstring> | 10 #include <cstring> |
(...skipping 27 matching lines...) |
38 // The target maximum utilization of the encoder and network resources. This is | 38 // The target maximum utilization of the encoder and network resources. This is |
39 // used to attenuate the actual measured utilization values in order to provide | 39 // used to attenuate the actual measured utilization values in order to provide |
40 // "breathing room" (i.e., to ensure there will be sufficient CPU and bandwidth | 40 // "breathing room" (i.e., to ensure there will be sufficient CPU and bandwidth |
41 // available to handle the occasional more-complex frames). | 41 // available to handle the occasional more-complex frames). |
42 const int kTargetUtilizationPercentage = 75; | 42 const int kTargetUtilizationPercentage = 75; |
43 | 43 |
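A minimal sketch of how this target can be applied: the measured utilization is divided by the 75% target (i.e. scaled up by 100/75) so that anything above the target reads as over-utilization, and key frames are clamped to 1.0 as in OnEncodedVideoFrame() further down. The standalone function below is illustrative only, not the code under review.

  #include <algorithm>

  // Same value as the constant above; repeated here so the sketch stands alone.
  constexpr int kTargetUtilizationPercentage = 75;

  // Attenuate a measured utilization against the 75% target so values above
  // the target read as overload, leaving headroom for occasional complex
  // frames. Key frames are capped at 1.0, matching the clamp applied in
  // OnEncodedVideoFrame() later in this file.
  double AttenuateUtilization(double measured_utilization, bool is_key_frame) {
    const double attenuated =
        measured_utilization / (kTargetUtilizationPercentage / 100.0);
    return is_key_frame ? std::min(1.0, attenuated) : attenuated;
  }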
44 // Extract capture begin/end timestamps from |video_frame|'s metadata and log | 44 // Extract capture begin/end timestamps from |video_frame|'s metadata and log |
45 // them. | 45 // them. |
46 void LogVideoCaptureTimestamps(CastEnvironment* cast_environment, | 46 void LogVideoCaptureTimestamps(CastEnvironment* cast_environment, |
47 const media::VideoFrame& video_frame, | 47 const media::VideoFrame& video_frame, |
48 RtpTimestamp rtp_timestamp) { | 48 RtpTimeTicks rtp_timestamp) { |
49 scoped_ptr<FrameEvent> capture_begin_event(new FrameEvent()); | 49 scoped_ptr<FrameEvent> capture_begin_event(new FrameEvent()); |
50 capture_begin_event->type = FRAME_CAPTURE_BEGIN; | 50 capture_begin_event->type = FRAME_CAPTURE_BEGIN; |
51 capture_begin_event->media_type = VIDEO_EVENT; | 51 capture_begin_event->media_type = VIDEO_EVENT; |
52 capture_begin_event->rtp_timestamp = rtp_timestamp; | 52 capture_begin_event->rtp_timestamp = rtp_timestamp; |
53 | 53 |
54 scoped_ptr<FrameEvent> capture_end_event(new FrameEvent()); | 54 scoped_ptr<FrameEvent> capture_end_event(new FrameEvent()); |
55 capture_end_event->type = FRAME_CAPTURE_END; | 55 capture_end_event->type = FRAME_CAPTURE_END; |
56 capture_end_event->media_type = VIDEO_EVENT; | 56 capture_end_event->media_type = VIDEO_EVENT; |
57 capture_end_event->rtp_timestamp = rtp_timestamp; | 57 capture_end_event->rtp_timestamp = rtp_timestamp; |
58 capture_end_event->width = video_frame.visible_rect().width(); | 58 capture_end_event->width = video_frame.visible_rect().width(); |
(...skipping 88 matching lines...) |
147 void VideoSender::InsertRawVideoFrame( | 147 void VideoSender::InsertRawVideoFrame( |
148 const scoped_refptr<media::VideoFrame>& video_frame, | 148 const scoped_refptr<media::VideoFrame>& video_frame, |
149 const base::TimeTicks& reference_time) { | 149 const base::TimeTicks& reference_time) { |
150 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | 150 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); |
151 | 151 |
152 if (!video_encoder_) { | 152 if (!video_encoder_) { |
153 NOTREACHED(); | 153 NOTREACHED(); |
154 return; | 154 return; |
155 } | 155 } |
156 | 156 |
157 const RtpTimestamp rtp_timestamp = | 157 const RtpTimeTicks rtp_timestamp = |
158 TimeDeltaToRtpDelta(video_frame->timestamp(), kVideoFrequency); | 158 RtpTimeTicks::FromTimeDelta(video_frame->timestamp(), kVideoFrequency); |
159 LogVideoCaptureTimestamps(cast_environment_.get(), *video_frame, | 159 LogVideoCaptureTimestamps(cast_environment_.get(), *video_frame, |
160 rtp_timestamp); | 160 rtp_timestamp); |
161 | 161 |
162 // Used by chrome/browser/extensions/api/cast_streaming/performance_test.cc | 162 // Used by chrome/browser/extensions/api/cast_streaming/performance_test.cc |
163 TRACE_EVENT_INSTANT2( | 163 TRACE_EVENT_INSTANT2( |
164 "cast_perf_test", "InsertRawVideoFrame", | 164 "cast_perf_test", "InsertRawVideoFrame", |
165 TRACE_EVENT_SCOPE_THREAD, | 165 TRACE_EVENT_SCOPE_THREAD, |
166 "timestamp", reference_time.ToInternalValue(), | 166 "timestamp", reference_time.ToInternalValue(), |
167 "rtp_timestamp", rtp_timestamp); | 167 "rtp_timestamp", rtp_timestamp.lower_32_bits()); |
168 | 168 |
169 bool low_latency_mode; | 169 bool low_latency_mode; |
170 if (video_frame->metadata()->GetBoolean( | 170 if (video_frame->metadata()->GetBoolean( |
171 VideoFrameMetadata::INTERACTIVE_CONTENT, &low_latency_mode)) { | 171 VideoFrameMetadata::INTERACTIVE_CONTENT, &low_latency_mode)) { |
172 if (low_latency_mode && !low_latency_mode_) { | 172 if (low_latency_mode && !low_latency_mode_) { |
173 VLOG(1) << "Interactive mode playout time " << min_playout_delay_; | 173 VLOG(1) << "Interactive mode playout time " << min_playout_delay_; |
174 playout_delay_change_cb_.Run(min_playout_delay_); | 174 playout_delay_change_cb_.Run(min_playout_delay_); |
175 } | 175 } |
176 low_latency_mode_ = low_latency_mode; | 176 low_latency_mode_ = low_latency_mode; |
177 } | 177 } |
178 | 178 |
179 // Drop the frame if either its RTP or reference timestamp is not an increase | 179 // Drop the frame if either its RTP or reference timestamp is not an increase |
180 // over the last frame's. This protects: 1) the duration calculations that | 180 // over the last frame's. This protects: 1) the duration calculations that |
181 // assume timestamps are monotonically non-decreasing, and 2) assumptions made | 181 // assume timestamps are monotonically non-decreasing, and 2) assumptions made |
182 // deeper in the implementation where each frame's RTP timestamp needs to be | 182 // deeper in the implementation where each frame's RTP timestamp needs to be |
183 // unique. | 183 // unique. |
184 if (!last_enqueued_frame_reference_time_.is_null() && | 184 if (!last_enqueued_frame_reference_time_.is_null() && |
185 (!IsNewerRtpTimestamp(rtp_timestamp, | 185 (rtp_timestamp <= last_enqueued_frame_rtp_timestamp_ || |
186 last_enqueued_frame_rtp_timestamp_) || | |
187 reference_time <= last_enqueued_frame_reference_time_)) { | 186 reference_time <= last_enqueued_frame_reference_time_)) { |
188 VLOG(1) << "Dropping video frame: RTP or reference time did not increase."; | 187 VLOG(1) << "Dropping video frame: RTP or reference time did not increase."; |
189 TRACE_EVENT_INSTANT2("cast.stream", "Video Frame Drop", | 188 TRACE_EVENT_INSTANT2("cast.stream", "Video Frame Drop", |
190 TRACE_EVENT_SCOPE_THREAD, | 189 TRACE_EVENT_SCOPE_THREAD, |
191 "rtp_timestamp", rtp_timestamp, | 190 "rtp_timestamp", rtp_timestamp.lower_32_bits(), |
192 "reason", "time did not increase"); | 191 "reason", "time did not increase"); |
193 return; | 192 return; |
194 } | 193 } |
195 | 194 |
196 // Two video frames are needed to compute the exact media duration added by | 195 // Two video frames are needed to compute the exact media duration added by |
197 // the next frame. If there are no frames in the encoder, compute a guess | 196 // the next frame. If there are no frames in the encoder, compute a guess |
198 // based on the configured |max_frame_rate_|. Any error introduced by this | 197 // based on the configured |max_frame_rate_|. Any error introduced by this |
199 // guess will be eliminated when |duration_in_encoder_| is updated in | 198 // guess will be eliminated when |duration_in_encoder_| is updated in |
200 // OnEncodedVideoFrame(). | 199 // OnEncodedVideoFrame(). |
201 const base::TimeDelta duration_added_by_next_frame = frames_in_encoder_ > 0 ? | 200 const base::TimeDelta duration_added_by_next_frame = frames_in_encoder_ > 0 ? |
202 reference_time - last_enqueued_frame_reference_time_ : | 201 reference_time - last_enqueued_frame_reference_time_ : |
(...skipping 21 matching lines...) |
224 } | 223 } |
225 | 224 |
226 // Some encoder implementations have a frame window for analysis. Since we | 225 // Some encoder implementations have a frame window for analysis. Since we |
227 // are dropping this frame, unless we instruct the encoder to flush all the | 226 // are dropping this frame, unless we instruct the encoder to flush all the |
228 // frames that have been enqueued for encoding, frames_in_encoder_ and | 227 // frames that have been enqueued for encoding, frames_in_encoder_ and |
229 // last_enqueued_frame_reference_time_ will never be updated and we will | 228 // last_enqueued_frame_reference_time_ will never be updated and we will |
230 // drop every subsequent frame for the rest of the session. | 229 // drop every subsequent frame for the rest of the session. |
231 video_encoder_->EmitFrames(); | 230 video_encoder_->EmitFrames(); |
232 | 231 |
233 TRACE_EVENT_INSTANT2("cast.stream", "Video Frame Drop", | 232 TRACE_EVENT_INSTANT2("cast.stream", "Video Frame Drop", |
234 TRACE_EVENT_SCOPE_THREAD, | 233 TRACE_EVENT_SCOPE_THREAD, |
235 "rtp_timestamp", rtp_timestamp, | 234 "rtp_timestamp", rtp_timestamp.lower_32_bits(), |
236 "reason", "too much in flight"); | 235 "reason", "too much in flight"); |
237 return; | 236 return; |
238 } | 237 } |
239 | 238 |
240 if (video_frame->visible_rect().IsEmpty()) { | 239 if (video_frame->visible_rect().IsEmpty()) { |
241 VLOG(1) << "Rejecting empty video frame."; | 240 VLOG(1) << "Rejecting empty video frame."; |
242 return; | 241 return; |
243 } | 242 } |
244 | 243 |
245 const int bitrate = congestion_control_->GetBitrate( | 244 const int bitrate = congestion_control_->GetBitrate( |
246 reference_time + target_playout_delay_, target_playout_delay_, | 245 reference_time + target_playout_delay_, target_playout_delay_, |
(...skipping 14 matching lines...) |
261 video_frame.get()); | 260 video_frame.get()); |
262 | 261 |
263 if (video_encoder_->EncodeVideoFrame( | 262 if (video_encoder_->EncodeVideoFrame( |
264 video_frame, | 263 video_frame, |
265 reference_time, | 264 reference_time, |
266 base::Bind(&VideoSender::OnEncodedVideoFrame, | 265 base::Bind(&VideoSender::OnEncodedVideoFrame, |
267 weak_factory_.GetWeakPtr(), | 266 weak_factory_.GetWeakPtr(), |
268 video_frame, | 267 video_frame, |
269 bitrate))) { | 268 bitrate))) { |
270 TRACE_EVENT_ASYNC_BEGIN1("cast.stream", "Video Encode", video_frame.get(), | 269 TRACE_EVENT_ASYNC_BEGIN1("cast.stream", "Video Encode", video_frame.get(), |
271 "rtp_timestamp", rtp_timestamp); | 270 "rtp_timestamp", rtp_timestamp.lower_32_bits()); |
272 frames_in_encoder_++; | 271 frames_in_encoder_++; |
273 duration_in_encoder_ += duration_added_by_next_frame; | 272 duration_in_encoder_ += duration_added_by_next_frame; |
274 last_enqueued_frame_rtp_timestamp_ = rtp_timestamp; | 273 last_enqueued_frame_rtp_timestamp_ = rtp_timestamp; |
275 last_enqueued_frame_reference_time_ = reference_time; | 274 last_enqueued_frame_reference_time_ = reference_time; |
276 } else { | 275 } else { |
277 VLOG(1) << "Encoder rejected a frame. Skipping..."; | 276 VLOG(1) << "Encoder rejected a frame. Skipping..."; |
278 TRACE_EVENT_INSTANT1("cast.stream", "Video Encode Reject", | 277 TRACE_EVENT_INSTANT1("cast.stream", "Video Encode Reject", |
279 TRACE_EVENT_SCOPE_THREAD, | 278 TRACE_EVENT_SCOPE_THREAD, |
280 "rtp_timestamp", rtp_timestamp); | 279 "rtp_timestamp", rtp_timestamp.lower_32_bits()); |
281 } | 280 } |
282 } | 281 } |
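The substantive change in this function is the switch from a raw 32-bit RtpTimestamp to the RtpTimeTicks type: conversion now goes through RtpTimeTicks::FromTimeDelta(), and trace/log call sites take lower_32_bits(). Below is a rough, self-contained model of that mapping, assuming the conventional 90 kHz video media clock (kVideoFrequency in this file); it illustrates the arithmetic only and is not the actual RtpTimeTicks implementation.

  #include <cstdint>

  // Assumed media clock rate: the 90 kHz RTP clock conventionally used for
  // video (what kVideoFrequency refers to in this file).
  constexpr int64_t kAssumedVideoFrequencyHz = 90000;

  // Illustrative model of RtpTimeTicks::FromTimeDelta(): scale a frame
  // timestamp (microseconds here) into ticks of the RTP media clock.
  int64_t MicrosecondsToRtpTicks(int64_t timestamp_us) {
    return timestamp_us * kAssumedVideoFrequencyHz / 1000000;
  }

  // Illustrative model of lower_32_bits(): trace events (and the RTP wire
  // format) carry only the low 32 bits of the wider tick counter.
  uint32_t Lower32Bits(int64_t rtp_ticks) {
    return static_cast<uint32_t>(rtp_ticks & 0xffffffff);
  }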
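The duration_added_by_next_frame comment above also mentions guessing the added duration from the configured maximum frame rate when the encoder holds no frames; the fallback branch itself lies in the elided lines. As a hedged illustration only (this is not the elided code, and the parameter name is a stand-in for |max_frame_rate_|), such a guess typically amounts to one frame period:

  #include <cstdint>

  // Illustrative only: with no previously enqueued frame to difference
  // against, assume the next frame spans one frame period at the maximum
  // configured frame rate (e.g. 33,333 us at 30 fps).
  int64_t GuessedFrameDurationMicroseconds(double max_frame_rate) {
    return static_cast<int64_t>(1000000.0 / max_frame_rate);
  }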
283 | 282 |
284 scoped_ptr<VideoFrameFactory> VideoSender::CreateVideoFrameFactory() { | 283 scoped_ptr<VideoFrameFactory> VideoSender::CreateVideoFrameFactory() { |
285 return video_encoder_ ? video_encoder_->CreateVideoFrameFactory() : nullptr; | 284 return video_encoder_ ? video_encoder_->CreateVideoFrameFactory() : nullptr; |
286 } | 285 } |
287 | 286 |
288 int VideoSender::GetNumberOfFramesInEncoder() const { | 287 int VideoSender::GetNumberOfFramesInEncoder() const { |
289 return frames_in_encoder_; | 288 return frames_in_encoder_; |
290 } | 289 } |
(...skipping 93 matching lines...) |
384 media::VideoFrameMetadata::RESOURCE_UTILIZATION, | 383 media::VideoFrameMetadata::RESOURCE_UTILIZATION, |
385 encoded_frame->dependency == EncodedFrame::KEY ? | 384 encoded_frame->dependency == EncodedFrame::KEY ? |
386 std::min(1.0, attenuated_utilization) : attenuated_utilization); | 385 std::min(1.0, attenuated_utilization) : attenuated_utilization); |
387 } | 386 } |
388 | 387 |
389 SendEncodedFrame(encoder_bitrate, std::move(encoded_frame)); | 388 SendEncodedFrame(encoder_bitrate, std::move(encoded_frame)); |
390 } | 389 } |
391 | 390 |
392 } // namespace cast | 391 } // namespace cast |
393 } // namespace media | 392 } // namespace media |