Chromium Code Reviews

Unified diff: media/cast/video_receiver/video_receiver.cc

Issue 280993002: [Cast] Repair receiver playout time calculations and frame skip logic. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 6 years, 7 months ago
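
For readers skimming the diff below: the change drops the receiver's incremental clock-offset estimation and replaces it with a single rule — playout time = sender-side capture time (recovered from RTCP Sender Report lip-sync data, via rtcp_.ToApproximateCaptureTime()) + target playout delay, with a guess when no Sender Report has arrived yet. The following is a minimal, self-contained sketch of that rule only; it uses std::chrono in place of base::TimeTicks/base::TimeDelta, and ComputePlayoutTime() is a hypothetical helper name, not the CL's actual API.

#include <chrono>
#include <optional>

using Clock = std::chrono::steady_clock;

// |capture_time| models the result of mapping an RTP timestamp back to the
// sender's capture time via RTCP Sender Report data; std::nullopt models the
// "no lip-sync info received yet" case.
Clock::time_point ComputePlayoutTime(
    std::optional<Clock::time_point> capture_time,
    Clock::duration target_playout_delay,
    Clock::duration expected_frame_duration) {
  if (!capture_time) {
    // No Sender Report yet: guess that the frame was captured half a frame
    // period ago.  Playout may jank until real lip-sync data arrives.
    capture_time = Clock::now() - expected_frame_duration / 2;
  }
  return *capture_time + target_playout_delay;
}
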
 // Copyright 2013 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "media/cast/video_receiver/video_receiver.h"

 #include <algorithm>

 #include "base/bind.h"
 #include "base/debug/trace_event.h"
 #include "base/logging.h"
 #include "base/message_loop/message_loop.h"
 #include "media/base/video_frame.h"
 #include "media/cast/logging/logging_defines.h"
 #include "media/cast/transport/cast_transport_defines.h"
 #include "media/cast/video_receiver/video_decoder.h"

 namespace {
 const int kMinSchedulingDelayMs = 1;
-const int kMinTimeBetweenOffsetUpdatesMs = 1000;
-const int kTimeOffsetMaxCounter = 10;
 }  // namespace

 namespace media {
 namespace cast {

 VideoReceiver::VideoReceiver(scoped_refptr<CastEnvironment> cast_environment,
                              const VideoReceiverConfig& video_config,
                              transport::PacedPacketSender* const packet_sender)
     : RtpReceiver(cast_environment->Clock(), NULL, &video_config),
       cast_environment_(cast_environment),
       event_subscriber_(kReceiverRtcpEventHistorySize, VIDEO_EVENT),
       codec_(video_config.codec),
-      target_delay_delta_(
+      target_playout_delay_(
           base::TimeDelta::FromMilliseconds(video_config.rtp_max_delay_ms)),
       expected_frame_duration_(
           base::TimeDelta::FromSeconds(1) / video_config.max_frame_rate),
+      reports_are_scheduled_(false),
       framer_(cast_environment->Clock(),
               this,
               video_config.incoming_ssrc,
               video_config.decoder_faster_than_max_frame_rate,
               video_config.rtp_max_delay_ms * video_config.max_frame_rate /
                   1000),
       rtcp_(cast_environment_,
             NULL,
             NULL,
             packet_sender,
             GetStatistics(),
             video_config.rtcp_mode,
             base::TimeDelta::FromMilliseconds(video_config.rtcp_interval),
             video_config.feedback_ssrc,
             video_config.incoming_ssrc,
             video_config.rtcp_c_name,
             false),
-      time_offset_counter_(0),
-      time_incoming_packet_updated_(false),
-      incoming_rtp_timestamp_(0),
-      is_waiting_for_consecutive_frame_(false),
+      is_waiting_to_emit_frames_(false),
       weak_factory_(this) {
   DCHECK_GT(video_config.rtp_max_delay_ms, 0);
   DCHECK_GT(video_config.max_frame_rate, 0);
   if (!video_config.use_external_decoder) {
     video_decoder_.reset(new VideoDecoder(cast_environment, video_config));
   }
   decryptor_.Initialize(video_config.aes_key, video_config.aes_iv_mask);
-  rtcp_.SetTargetDelay(target_delay_delta_);
+  rtcp_.SetTargetDelay(target_playout_delay_);
   cast_environment_->Logging()->AddRawEventSubscriber(&event_subscriber_);
   memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_));
 }

 VideoReceiver::~VideoReceiver() {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
   cast_environment_->Logging()->RemoveRawEventSubscriber(&event_subscriber_);
 }

-void VideoReceiver::InitializeTimers() {
-  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-  ScheduleNextRtcpReport();
-  ScheduleNextCastMessage();
-}
-
 void VideoReceiver::GetRawVideoFrame(
     const VideoFrameDecodedCallback& callback) {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
   DCHECK(!callback.is_null());
   DCHECK(video_decoder_.get());
   GetEncodedVideoFrame(base::Bind(
       &VideoReceiver::DecodeEncodedVideoFrame,
       // Note: Use of Unretained is safe since this Closure is guaranteed to be
       // invoked before destruction of |this|.
       base::Unretained(this),
(...skipping 61 matching lines...)
   // TODO(miu): We should only be peeking at the metadata, and not copying the
   // payload yet!  Or, at least, peek using a StringPiece instead of a copy.
   scoped_ptr<transport::EncodedVideoFrame> encoded_frame(
       new transport::EncodedVideoFrame());
   bool is_consecutively_next_frame = false;
   if (!framer_.GetEncodedVideoFrame(encoded_frame.get(),
                                     &is_consecutively_next_frame)) {
     VLOG(1) << "Wait for more video packets to produce a completed frame.";
     return;  // OnReceivedPayloadData() will invoke this method in the future.
   }
+  if (!is_consecutively_next_frame &&
+      !transport::CanDropFramesForCodec(codec_)) {
+    VLOG(1) << "Wait for the next frame in sequence (codec requirement).";
+    return;  // OnReceivedPayloadData() will invoke this method in the future.
+  }
+
+  const base::TimeTicks playout_time =
+      GetPlayoutTime(encoded_frame->rtp_timestamp);

   // If |framer_| has a frame ready that is out of sequence, examine the
   // playout time to determine whether it's acceptable to continue, thereby
   // skipping one or more frames.  Skip if the missing frame wouldn't complete
   // playing before the start of playback of the available frame.
-  const base::TimeTicks now = cast_environment_->Clock()->NowTicks();
-  const base::TimeTicks playout_time =
-      GetPlayoutTime(now, encoded_frame->rtp_timestamp);
   if (!is_consecutively_next_frame) {
+    const base::TimeTicks now = cast_environment_->Clock()->NowTicks();
     // TODO(miu): Also account for expected decode time here?
     const base::TimeTicks earliest_possible_end_time_of_missing_frame =
         now + expected_frame_duration_;
     if (earliest_possible_end_time_of_missing_frame < playout_time) {
       VLOG(1) << "Wait for next consecutive frame instead of skipping.";
-      if (!is_waiting_for_consecutive_frame_) {
-        is_waiting_for_consecutive_frame_ = true;
-        cast_environment_->PostDelayedTask(
-            CastEnvironment::MAIN,
-            FROM_HERE,
-            base::Bind(&VideoReceiver::EmitAvailableEncodedFramesAfterWaiting,
-                       weak_factory_.GetWeakPtr()),
-            playout_time - now);
-      }
+      RetryEmitAfterWaiting(playout_time - now);
       return;
     }
   }

   // Decrypt the payload data in the frame, if crypto is being used.
   if (decryptor_.initialized()) {
     std::string decrypted_video_data;
     if (!decryptor_.Decrypt(encoded_frame->frame_id,
                             encoded_frame->data,
                             &decrypted_video_data)) {
(...skipping 17 matching lines...)
213 "render_time", playout_time.ToInternalValue()); 201 "render_time", playout_time.ToInternalValue());
214 cast_environment_->PostTask(CastEnvironment::MAIN, 202 cast_environment_->PostTask(CastEnvironment::MAIN,
215 FROM_HERE, 203 FROM_HERE,
216 base::Bind(frame_request_queue_.front(), 204 base::Bind(frame_request_queue_.front(),
217 base::Passed(&encoded_frame), 205 base::Passed(&encoded_frame),
218 playout_time)); 206 playout_time));
219 frame_request_queue_.pop_front(); 207 frame_request_queue_.pop_front();
220 } 208 }
221 } 209 }
222 210
211 void VideoReceiver::RetryEmitAfterWaiting(base::TimeDelta wait_time) {
212 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
213 if (is_waiting_to_emit_frames_)
214 return;
215 is_waiting_to_emit_frames_ = true;
216 cast_environment_->PostDelayedTask(
217 CastEnvironment::MAIN,
218 FROM_HERE,
219 base::Bind(&VideoReceiver::EmitAvailableEncodedFramesAfterWaiting,
220 weak_factory_.GetWeakPtr()),
221 wait_time);
222 }
223
223 void VideoReceiver::EmitAvailableEncodedFramesAfterWaiting() { 224 void VideoReceiver::EmitAvailableEncodedFramesAfterWaiting() {
224 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); 225 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
225 DCHECK(is_waiting_for_consecutive_frame_); 226 DCHECK(is_waiting_to_emit_frames_);
226 is_waiting_for_consecutive_frame_ = false; 227 is_waiting_to_emit_frames_ = false;
227 EmitAvailableEncodedFrames(); 228 EmitAvailableEncodedFrames();
228 } 229 }
229 230
-base::TimeTicks VideoReceiver::GetPlayoutTime(base::TimeTicks now,
-                                              uint32 rtp_timestamp) {
-  // TODO(miu): This and AudioReceiver::GetPlayoutTime() need to be reconciled!
-
-  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-  // Senders time in ms when this frame was captured.
-  // Note: the senders clock and our local clock might not be synced.
-  base::TimeTicks rtp_timestamp_in_ticks;
-
-  // Compute the time offset_in_ticks based on the incoming_rtp_timestamp_.
-  if (time_offset_counter_ == 0) {
-    // Check for received RTCP to sync the stream play it out asap.
-    if (rtcp_.RtpTimestampInSenderTime(kVideoFrequency,
-                                       incoming_rtp_timestamp_,
-                                       &rtp_timestamp_in_ticks)) {
-      ++time_offset_counter_;
-    }
-  } else if (time_incoming_packet_updated_) {
-    if (rtcp_.RtpTimestampInSenderTime(kVideoFrequency,
-                                       incoming_rtp_timestamp_,
-                                       &rtp_timestamp_in_ticks)) {
-      // Time to update the time_offset.
-      base::TimeDelta time_offset =
-          time_incoming_packet_ - rtp_timestamp_in_ticks;
-      // Taking the minimum of the first kTimeOffsetMaxCounter values. We are
-      // assuming that we are looking for the minimum offset, which will occur
-      // when network conditions are the best. This should occur at least once
-      // within the first kTimeOffsetMaxCounter samples. Any drift should be
-      // very slow, and negligible for this use case.
-      if (time_offset_counter_ == 1)
-        time_offset_ = time_offset;
-      else if (time_offset_counter_ < kTimeOffsetMaxCounter) {
-        time_offset_ = std::min(time_offset_, time_offset);
-      }
-      if (time_offset_counter_ < kTimeOffsetMaxCounter)
-        ++time_offset_counter_;
-    }
-  }
-  // Reset |time_incoming_packet_updated_| to enable a future measurement.
-  time_incoming_packet_updated_ = false;
-  // Compute the actual rtp_timestamp_in_ticks based on the current timestamp.
-  if (!rtcp_.RtpTimestampInSenderTime(
-          kVideoFrequency, rtp_timestamp, &rtp_timestamp_in_ticks)) {
-    // This can fail if we have not received any RTCP packets in a long time.
-    // BUG: These calculations are a placeholder, and to be revisited in a
-    // soon-upcoming change.  http://crbug.com/356942
-    const int frequency_khz = kVideoFrequency / 1000;
-    const base::TimeDelta delta_based_on_rtp_timestamps =
-        base::TimeDelta::FromMilliseconds(
-            static_cast<int32>(rtp_timestamp - incoming_rtp_timestamp_) /
-            frequency_khz);
-    return time_incoming_packet_ + delta_based_on_rtp_timestamps;
-  }
-
-  base::TimeTicks render_time =
-      rtp_timestamp_in_ticks + time_offset_ + target_delay_delta_;
-  // TODO(miu): This is broken since this "getter" method may be called on
-  // frames received out-of-order, which means the playout times for earlier
-  // frames will be computed incorrectly.
-#if 0
-  if (last_render_time_ > render_time)
-    render_time = last_render_time_;
-  last_render_time_ = render_time;
-#endif
-
-  return render_time;
-}
+base::TimeTicks VideoReceiver::GetPlayoutTime(uint32 rtp_timestamp) const {
+  base::TimeTicks capture_time = rtcp_.ToApproximateCaptureTime(
+      rtp_timestamp, kVideoFrequency);
+
+  // HACK: The sender should have provided Sender Reports which allow this
+  // receiver to map RTP timestamps back to the time the frame was captured on
+  // the sender.  It should have done this before sending the first frame, but
+  // the spec does not currently require this.  Therefore, if the data is
+  // missing, this receiver is forced to take a guess.
+  //
+  // The guess is based on a number of assumptions which in many environments
+  // will be completely wrong:
+  //   1. The difference between the sender clock and receiver clock (relative
+  //      to NTP epoch) is very close to zero.
+  //   2. The amount of time the sender took to encode/process the frame before
+  //      transport is approximately 1/2 the amount of time between frames.
+  //   3. Perfect network conditions (i.e., negligible latency, no packet loss,
+  //      frames are arriving in-order, etc.).
+  if (capture_time.is_null()) {
+    VLOG(1) << ("Guessing playout time because sender has not yet sent lip "
+                "sync info.  Expect jank in the near future!");
+    capture_time = cast_environment_->Clock()->NowTicks() -
+        (expected_frame_duration_ / 2);
+  }

hubbe 2014/05/14 23:12:23  Doesn't ToApproximateCaptureTime() also guess? Bet
miu 2014/05/16 22:45:47  No. Only XXXXXReceiver::GetPlayoutTime() has the

+
+  return capture_time + target_playout_delay_;
+}

 void VideoReceiver::IncomingPacket(scoped_ptr<Packet> packet) {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
   if (Rtcp::IsRtcpPacket(&packet->front(), packet->size())) {
     rtcp_.IncomingRtcpPacket(&packet->front(), packet->size());
   } else {
     ReceivedPacket(&packet->front(), packet->size());
   }
+  if (!reports_are_scheduled_) {
+    ScheduleNextRtcpReport();
+    ScheduleNextCastMessage();
+    reports_are_scheduled_ = true;
+  }
 }

 void VideoReceiver::OnReceivedPayloadData(const uint8* payload_data,
                                           size_t payload_size,
                                           const RtpCastHeader& rtp_header) {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));

-  base::TimeTicks now = cast_environment_->Clock()->NowTicks();
-  if (time_incoming_packet_.is_null() ||
-      now - time_incoming_packet_ >
-          base::TimeDelta::FromMilliseconds(kMinTimeBetweenOffsetUpdatesMs)) {
-    if (time_incoming_packet_.is_null())
-      InitializeTimers();
-    incoming_rtp_timestamp_ = rtp_header.rtp_timestamp;
-    // The following incoming packet info is used for syncing sender and
-    // receiver clock. Use only the first packet of every frame to obtain a
-    // minimal value.
-    if (rtp_header.packet_id == 0) {
-      time_incoming_packet_ = now;
-      time_incoming_packet_updated_ = true;
-    }
-  }
+  const base::TimeTicks now = cast_environment_->Clock()->NowTicks();

   frame_id_to_rtp_timestamp_[rtp_header.frame_id & 0xff] =
       rtp_header.rtp_timestamp;
   cast_environment_->Logging()->InsertPacketEvent(
       now,
       PACKET_RECEIVED,
       VIDEO_EVENT,
       rtp_header.rtp_timestamp,
       rtp_header.frame_id,
       rtp_header.packet_id,
(...skipping 73 matching lines...)
 }

 void VideoReceiver::SendNextRtcpReport() {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
   rtcp_.SendRtcpFromRtpReceiver(NULL, NULL);
   ScheduleNextRtcpReport();
 }

 }  // namespace cast
 }  // namespace media
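
The other half of the CL is the frame-skip logic in EmitAvailableEncodedFrames(): an out-of-sequence frame is emitted (skipping the missing one) only when the codec tolerates dropped frames and the missing frame could not have finished playing before the available frame's playout time anyway. Below is a hedged sketch of that decision only; ShouldEmitAvailableFrameNow() is a hypothetical name, the std::chrono types stand in for base::TimeTicks/base::TimeDelta, and codec_can_drop_frames models transport::CanDropFramesForCodec().

#include <chrono>

using Clock = std::chrono::steady_clock;

// Returns true if the available (but out-of-sequence) frame should be emitted
// now, skipping the missing frame(s); false means wait and retry later.
bool ShouldEmitAvailableFrameNow(bool is_consecutively_next_frame,
                                 bool codec_can_drop_frames,
                                 Clock::time_point now,
                                 Clock::time_point playout_time,
                                 Clock::duration expected_frame_duration) {
  if (is_consecutively_next_frame)
    return true;   // Nothing is missing; emit immediately.
  if (!codec_can_drop_frames)
    return false;  // Codec needs every frame; keep waiting for the gap.
  // Earliest moment the missing frame could finish playing if it arrived now.
  // (The CL notes a TODO to also account for expected decode time.)
  const Clock::time_point earliest_end_of_missing_frame =
      now + expected_frame_duration;
  // Skip only if the missing frame could not complete before the available
  // frame is due to play out.
  return earliest_end_of_missing_frame >= playout_time;
}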