Index: media/cast/sender/video_sender.cc
diff --git a/media/cast/sender/video_sender.cc b/media/cast/sender/video_sender.cc
index f4dcb6e020687f071aaaf10fce0776fe5c9b51c4..8914444e0ca7a77e6abaec9d9c19040f03ccbfb8 100644
--- a/media/cast/sender/video_sender.cc
+++ b/media/cast/sender/video_sender.cc
@@ -19,6 +19,12 @@
namespace media {
namespace cast {
+namespace {
+// The additional number of frames that can be in-flight when input exceeds the
+// maximum frame rate.
+const int kMaxFrameBurst = 5;
+}
+
// Note, we use a fixed bitrate value when external video encoder is used.
// Some hardware encoder shows bad behavior if we set the bitrate too
// frequently, e.g. quality drop, not abiding by target bitrate, etc.
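
A rough reading of this constant (illustrative numbers, not taken from the patch):
the burst check added to InsertRawVideoFrame() below allows roughly
max_frame_rate_ * (media duration already in flight) + kMaxFrameBurst frames to be
in flight at once. At an assumed 30 fps with 100 ms of media in flight, that is
about 30 * 0.1 + 5 = 8 frames; with nothing in flight it degenerates to the bare
5-frame burst allowance.
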
@@ -82,7 +88,6 @@ VideoSender::VideoSender(
  transport_config.ssrc = video_config.ssrc;
  transport_config.feedback_ssrc = video_config.incoming_feedback_ssrc;
  transport_config.rtp_payload_type = video_config.rtp_payload_type;
-  transport_config.stored_frames = max_unacked_frames_;
  transport_config.aes_key = video_config.aes_key;
  transport_config.aes_iv_mask = video_config.aes_iv_mask;
@@ -124,8 +129,61 @@ void VideoSender::InsertRawVideoFrame(
      "timestamp", capture_time.ToInternalValue(),
      "rtp_timestamp", rtp_timestamp);
-  if (ShouldDropNextFrame(capture_time)) {
-    VLOG(1) << "Dropping frame due to too many frames currently in-flight.";
+  // Drop frames that are out-of-order since the duration calculations assume
+  // frame timestamps are monotonically non-decreasing.
+  if (!last_enqueued_frame_reference_time_.is_null() &&
+      capture_time < last_enqueued_frame_reference_time_) {
+    VLOG(1) << "Dropping video frame: Reference time is out-of-order.";
+    return;
+  }
+
+  // Check that enqueuing the next |video_frame| won't cause more frames to
+  // become in-flight than the system's design limit.
+  const int count_unacked_frames = GetUnackedFrameCount();
+  const int count_frames_in_flight = frames_in_encoder_ + count_unacked_frames;
+  if (count_frames_in_flight >= kMaxUnackedFrames) {
+    VLOG(1) << "Dropping video frame: Too many frames would be in-flight.";
+    return;
+  }
+
+  // Check that enqueuing the next |video_frame| won't exceed the configured
+  // frame rate, allowing for short-term bursts.
+  base::TimeDelta duration_in_flight = duration_in_encoder_;
+  if (count_unacked_frames > 0) {
+    const uint32 oldest_unacked_frame_id = latest_acked_frame_id_ + 1;
+    duration_in_flight += GetRecordedReferenceTime(last_sent_frame_id_) -
+        GetRecordedReferenceTime(oldest_unacked_frame_id);
+  }
+  const double max_frames_in_flight =
+      max_frame_rate_ * duration_in_flight.InSecondsF();
+  if (count_frames_in_flight >= max_frames_in_flight + kMaxFrameBurst) {
+    VLOG(1) << "Dropping video frame: Safe burst threshold would be exceeded.";
+    return;
+  }
+
+  // Check that enqueuing the next |video_frame| won't exceed the allowed
+  // in-flight media duration.
+  //
+  // Two video frames are needed to compute the exact media duration added by
+  // the next frame. If there are no frames in the encoder, compute a guess
+  // based on the configured |max_frame_rate_|. Any error introduced by this
+  // guess will be eliminated when |duration_in_encoder_| is updated in
+  // OnEncodedVideoFrame().
+  const base::TimeDelta duration_added_by_next_frame = frames_in_encoder_ > 0 ?
+      capture_time - last_enqueued_frame_reference_time_ :
+      base::TimeDelta::FromMicroseconds(1000000.0 / max_frame_rate_ + 0.5);
+  const base::TimeDelta duration_would_be_in_flight =
+      duration_in_flight + duration_added_by_next_frame;
+  const base::TimeDelta allowed_in_flight = GetAllowedInFlightMediaDuration();
+  VLOG(2) << "Video in-flight: "
+          << duration_in_flight.InMicroseconds() << " usec in-flight + "
+          << duration_added_by_next_frame.InMicroseconds()
+          << " usec for next frame would be "
+          << (allowed_in_flight > base::TimeDelta() ?
+                  100 * duration_would_be_in_flight / allowed_in_flight :
+                  kint64max) << "% of allowed in-flight.";
+  if (duration_would_be_in_flight > allowed_in_flight) {
+    VLOG(1) << "Dropping video: Too long a video duration would be in-flight.";
    return;
  }
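
Taken together, the frame-count, burst-rate, and in-flight-duration checks added
above can be read as a single admission predicate over the sender's in-flight
state. The following is a minimal standalone sketch of that logic, not the
patch's code: it takes plain arguments in place of the VideoSender members
(frames_in_encoder_, GetUnackedFrameCount(), GetAllowedInFlightMediaDuration(),
and so on), uses std::chrono instead of base::TimeDelta, and picks an arbitrary
value for kMaxUnackedFrames.

#include <chrono>

namespace {

// Mirrors the constant introduced by the patch.
constexpr int kMaxFrameBurst = 5;

// Stand-in for the existing hard cap referenced by the patch; the value here
// is arbitrary, chosen only for illustration.
constexpr int kMaxUnackedFrames = 120;

}  // namespace

// Returns true if the next frame should be dropped rather than enqueued.
// All arguments describe state *before* the candidate frame is added:
//   frames_in_encoder      - frames handed to the encoder, not yet encoded
//   unacked_frame_count    - encoded frames sent but not yet acked
//   max_frame_rate         - configured maximum frame rate (fps)
//   duration_in_flight     - media duration covered by the frames above
//   duration_of_next_frame - media duration the candidate frame would add
//   allowed_in_flight      - cap on in-flight media duration
bool ShouldDropNextFrame(int frames_in_encoder,
                         int unacked_frame_count,
                         double max_frame_rate,
                         std::chrono::microseconds duration_in_flight,
                         std::chrono::microseconds duration_of_next_frame,
                         std::chrono::microseconds allowed_in_flight) {
  // 1. Hard cap on the total number of frames in flight.
  const int frames_in_flight = frames_in_encoder + unacked_frame_count;
  if (frames_in_flight >= kMaxUnackedFrames)
    return true;

  // 2. Frame-rate cap with a short-term burst allowance.
  const double seconds_in_flight =
      std::chrono::duration<double>(duration_in_flight).count();
  if (frames_in_flight >= max_frame_rate * seconds_in_flight + kMaxFrameBurst)
    return true;

  // 3. Cap on the total media duration that would be in flight.
  return duration_in_flight + duration_of_next_frame > allowed_in_flight;
}

Note that duration_in_flight here corresponds to duration_in_encoder_ plus the
reference-time span of the unacked frames in the patch, which is what lets a
stalled receiver throttle the sender even when the encoder queue is empty.
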
@@ -142,16 +200,14 @@ void VideoSender::InsertRawVideoFrame(
          base::Bind(&VideoSender::OnEncodedVideoFrame,
                     weak_factory_.GetWeakPtr(),
                     bitrate))) {
+    last_enqueued_frame_reference_time_ = capture_time;
    frames_in_encoder_++;
+    duration_in_encoder_ += duration_added_by_next_frame;
  } else {
    VLOG(1) << "Encoder rejected a frame. Skipping...";
  }
}
-int VideoSender::GetNumberOfFramesInEncoder() const {
-  return frames_in_encoder_;
-}
-
void VideoSender::OnAck(uint32 frame_id) {
  video_encoder_->LatestFrameIdToReference(frame_id);
}
@@ -171,6 +227,9 @@ void VideoSender::OnEncodedVideoFrame(
  frames_in_encoder_--;
  DCHECK_GE(frames_in_encoder_, 0);
+  duration_in_encoder_ =
+      last_enqueued_frame_reference_time_ - encoded_frame->reference_time;
+
  SendEncodedFrame(encoder_bitrate, encoded_frame.Pass());
}
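
To make the two-sided bookkeeping concrete (illustrative numbers, assuming a
max_frame_rate_ of 30): suppose frames are enqueued with reference times 0 ms,
33 ms, and 66 ms. On each successful EncodeVideoFrame() call,
duration_in_encoder_ grows by duration_added_by_next_frame, i.e. by the
1/max_frame_rate_ guess (about 33 ms) for the first frame and by the actual
inter-frame gap (33 ms each) for the next two, reaching roughly 99 ms. When the
0 ms frame later comes out of the encoder, the assignment above replaces that
running total with the exact span still queued: last_enqueued_frame_reference_time_
(66 ms) minus the encoded frame's reference_time (0 ms) = 66 ms, which is the
media duration carried by the two remaining frames and discards any error from
the first-frame guess.
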