Index: media/cast/sender/frame_sender.cc
diff --git a/media/cast/sender/frame_sender.cc b/media/cast/sender/frame_sender.cc
index 300e8146ed8e0157b875a4edf9c0c869b4710d9c..946658e14fba5b339ab9ee14eb80839bca5581e7 100644
--- a/media/cast/sender/frame_sender.cc
+++ b/media/cast/sender/frame_sender.cc
@@ -4,6 +4,8 @@
 #include "media/cast/sender/frame_sender.h"
+#include <algorithm>
+
 #include "base/debug/trace_event.h"
 namespace media {
@@ -13,6 +15,11 @@ namespace {
 const int kMinSchedulingDelayMs = 1;
 const int kNumAggressiveReportsSentAtStart = 100;
+// These control the amount and granularity of history in the "max round trip
+// time" estimation logic. 40 buckets * 15000 millis --> 10 minutes total.
+const int kNumRttBuckets = 40;
+const int kRttBucketRotationMs = 15000;
+
 }  // namespace
 FrameSender::FrameSender(scoped_refptr<CastEnvironment> cast_environment,
@@ -33,11 +40,12 @@ FrameSender::FrameSender(scoped_refptr<CastEnvironment> cast_environment,
       last_sent_frame_id_(0),
       latest_acked_frame_id_(0),
       duplicate_ack_counter_(0),
-      rtp_timebase_(rtp_timebase),
       congestion_control_(congestion_control),
+      rtp_timebase_(rtp_timebase),
       is_audio_(is_audio),
       weak_factory_(this) {
   DCHECK(transport_sender_);
+  DCHECK_GE(max_frame_rate_, 0.001);
   DCHECK_GT(rtp_timebase_, 0);
   DCHECK(congestion_control_);
   SetTargetPlayoutDelay(playout_delay);
@@ -91,6 +99,30 @@ void FrameSender::SendRtcpReport(bool schedule_future_reports) {
 void FrameSender::OnMeasuredRoundTripTime(base::TimeDelta rtt) {
   DCHECK(rtt > base::TimeDelta());
   current_round_trip_time_ = rtt;
+
+  // Rotate the "max round trip time" buckets, if needed to prune out old
+  // history, and then update the current bucket and overall expected value.
+  const base::TimeTicks now = cast_environment_->Clock()->NowTicks();
+  if (last_max_rtt_bucket_rotation_.is_null() ||
+      (now - last_max_rtt_bucket_rotation_) >
+          base::TimeDelta::FromMilliseconds(kRttBucketRotationMs)) {
+    if (max_rtt_buckets_.size() == kNumRttBuckets)
+      max_rtt_buckets_.pop_front();
+    max_rtt_buckets_.push_back(base::TimeDelta());
+    last_max_rtt_bucket_rotation_ = now;
+  }
+  if (current_round_trip_time_ > max_rtt_buckets_.back()) {
+    max_rtt_buckets_.back() = current_round_trip_time_;
+    const base::TimeDelta& max_expected_rtt =
+        *(std::max_element(max_rtt_buckets_.begin(), max_rtt_buckets_.end()));
+    expected_max_one_way_trip_time_ = max_expected_rtt / 2;
+  }
+
+  VLOG(2)
+      << (is_audio_ ? "AUDIO[" : "VIDEO[") << ssrc_ << "] Last measured RTT is "
+      << current_round_trip_time_.InMicroseconds()
+      << " usec (expected max one way trip time is "
+      << expected_max_one_way_trip_time_.InMicroseconds() << " usec)";
 }
 void FrameSender::SetTargetPlayoutDelay(
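
For readers outside the Chromium tree, here is a minimal, standalone re-expression of the bucketed max-RTT estimator added in the hunk above, using std::chrono and std::deque in place of base::TimeTicks/base::TimeDelta and Chromium's clock plumbing. The class and method names are illustrative only; the two constants mirror kNumRttBuckets and kRttBucketRotationMs.

#include <algorithm>
#include <chrono>
#include <cstddef>
#include <deque>

// Illustrative sketch (not Chromium code): keeps the per-interval maximum RTT
// for roughly the last 10 minutes (40 buckets x 15 s) and derives the expected
// maximum one-way trip time as half of the largest retained RTT.
class MaxRttEstimator {
 public:
  using Clock = std::chrono::steady_clock;

  void OnMeasuredRoundTripTime(std::chrono::microseconds rtt,
                               Clock::time_point now) {
    // Rotate: open a fresh bucket every 15 seconds and discard the oldest one
    // so that only the last 40 intervals of history influence the estimate.
    if (buckets_.empty() || (now - last_rotation_) > kRotationPeriod) {
      if (buckets_.size() == kNumBuckets)
        buckets_.pop_front();
      buckets_.push_back(std::chrono::microseconds::zero());
      last_rotation_ = now;
    }
    // Track the maximum within the current bucket, then recompute the overall
    // maximum across all retained buckets.
    if (rtt > buckets_.back()) {
      buckets_.back() = rtt;
      const auto max_rtt = *std::max_element(buckets_.begin(), buckets_.end());
      expected_max_one_way_trip_time_ = max_rtt / 2;
    }
  }

  std::chrono::microseconds expected_max_one_way_trip_time() const {
    return expected_max_one_way_trip_time_;
  }

 private:
  static constexpr std::size_t kNumBuckets = 40;              // kNumRttBuckets
  static constexpr std::chrono::seconds kRotationPeriod{15};  // kRttBucketRotationMs

  std::deque<std::chrono::microseconds> buckets_;
  Clock::time_point last_rotation_;
  std::chrono::microseconds expected_max_one_way_trip_time_{0};
};

Only the per-bucket maxima are retained, so memory stays bounded no matter how many RTT measurements arrive within a bucket's 15-second window.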
@@ -162,11 +194,30 @@ RtpTimestamp FrameSender::GetRecordedRtpTimestamp(uint32 frame_id) const {
   return frame_rtp_timestamps_[frame_id % arraysize(frame_rtp_timestamps_)];
 }
+int FrameSender::GetUnackedFrameCount() const {
+  const int count =
+      static_cast<int32>(last_sent_frame_id_ - latest_acked_frame_id_);
+  DCHECK_GE(count, 0);
+  return count;
+}
+
+base::TimeDelta FrameSender::GetAllowedInFlightMediaDuration() const {
+  // The total amount of allowed in-flight media should equal the amount that
+  // fits within the entire playout delay window, plus the maximum amount of
+  // time it could take to receive an ACK from the receiver.
+  return target_playout_delay_ +
+      std::min(expected_max_one_way_trip_time_, target_playout_delay_ / 2);
+}
+
 void FrameSender::SendEncodedFrame(
     int requested_bitrate_before_encode,
     scoped_ptr<EncodedFrame> encoded_frame) {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+  VLOG(2) << (is_audio_ ? "AUDIO[" : "VIDEO[") << ssrc_
+          << "] About to send another frame: last_sent="
+          << last_sent_frame_id_ << ", latest_acked=" << latest_acked_frame_id_;
+
   const uint32 frame_id = encoded_frame->frame_id;
   const bool is_first_frame_to_be_sent = last_send_time_.is_null();
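
A quick, self-contained check of the arithmetic behind the two new helpers in this hunk; the numbers below are made up for illustration, and only the formulas mirror GetUnackedFrameCount() and GetAllowedInFlightMediaDuration().

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Same subtraction as GetUnackedFrameCount(): unsigned wraparound keeps the
// difference well-defined even after the 32-bit frame ID counter rolls over.
int UnackedFrameCount(uint32_t last_sent, uint32_t latest_acked) {
  return static_cast<int32_t>(last_sent - latest_acked);
}

// Same formula as GetAllowedInFlightMediaDuration(): the playout delay window
// plus the expected ACK latency, capped at half the playout delay.
int64_t AllowedInFlightMicros(int64_t target_playout_delay,
                              int64_t expected_max_one_way_trip) {
  return target_playout_delay +
         std::min(expected_max_one_way_trip, target_playout_delay / 2);
}

int main() {
  // Frame IDs that wrapped around: 0xFFFFFFFE -> 0x00000002 is 4 frames apart.
  std::printf("unacked frames: %d\n",
              UnackedFrameCount(0x00000002u, 0xFFFFFFFEu));
  // 400 ms playout delay and a 30 ms expected one-way trip -> 430 ms allowed.
  std::printf("allowed in-flight: %lld usec\n",
              static_cast<long long>(AllowedInFlightMicros(400000, 30000)));
  return 0;
}

The std::min() cap means the allowance can never exceed 1.5x the target playout delay, so an unusually large RTT estimate cannot balloon the in-flight budget.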
@@ -297,32 +348,5 @@ void FrameSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
   }
 }
-bool FrameSender::ShouldDropNextFrame(base::TimeTicks capture_time) const {
-  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-  int frames_in_flight = 0;
-  base::TimeDelta duration_in_flight;
-  if (!last_send_time_.is_null()) {
-    frames_in_flight =
-        static_cast<int32>(last_sent_frame_id_ - latest_acked_frame_id_);
-    if (frames_in_flight > 0) {
-      const uint32 oldest_unacked_frame_id = latest_acked_frame_id_ + 1;
-      duration_in_flight =
-          capture_time - GetRecordedReferenceTime(oldest_unacked_frame_id);
-    }
-  }
-  frames_in_flight += GetNumberOfFramesInEncoder();
-  VLOG(2) << frames_in_flight
-          << " frames in flight; last sent: " << last_sent_frame_id_
-          << "; latest acked: " << latest_acked_frame_id_
-          << "; frames in encoder: " << GetNumberOfFramesInEncoder()
-          << "; duration in flight: "
-          << duration_in_flight.InMicroseconds() << " usec ("
-          << (target_playout_delay_ > base::TimeDelta() ?
-              100 * duration_in_flight / target_playout_delay_ :
-              kint64max) << "%)";
-  return frames_in_flight >= max_unacked_frames_ ||
-      duration_in_flight >= target_playout_delay_;
-}
-
 }  // namespace cast
 }  // namespace media
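
For context on the deletion above: ShouldDropNextFrame() enforced two limits, a frame-count limit (max_unacked_frames_) and a duration limit (target_playout_delay_). Below is a minimal sketch of that same decision restated over the quantities the new helpers expose; this is only an illustration of how the pieces could fit together, since the actual replacement for the deleted function is not part of this patch, and the names here are hypothetical.

#include <cstdint>

// Hypothetical wiring, not code from this patch: the deleted check as a pure
// function. duration_limit_us could be the old target playout delay or the
// value of the new GetAllowedInFlightMediaDuration() helper.
bool ShouldDropNextFrame(int unacked_frame_count,        // GetUnackedFrameCount()
                         int frames_in_encoder,          // GetNumberOfFramesInEncoder()
                         int max_unacked_frames,
                         int64_t duration_in_flight_us,  // capture time minus the reference
                                                         // time of the oldest unacked frame
                         int64_t duration_limit_us) {
  const int frames_in_flight = unacked_frame_count + frames_in_encoder;
  return frames_in_flight >= max_unacked_frames ||
         duration_in_flight_us >= duration_limit_us;
}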