OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/cast/video_receiver/video_receiver.h" | 5 #include "media/cast/receiver/cast_receiver_impl.h" |
6 | |
7 #include <algorithm> | |
8 | 6 |
9 #include "base/bind.h" | 7 #include "base/bind.h" |
| 8 #include "base/bind_helpers.h" |
| 9 #include "base/callback.h" |
10 #include "base/debug/trace_event.h" | 10 #include "base/debug/trace_event.h" |
11 #include "base/logging.h" | 11 #include "base/logging.h" |
12 #include "base/message_loop/message_loop.h" | 12 #include "base/message_loop/message_loop.h" |
13 #include "media/base/video_frame.h" | 13 #include "media/cast/receiver/audio_decoder.h" |
14 #include "media/cast/logging/logging_defines.h" | 14 #include "media/cast/receiver/video_decoder.h" |
15 #include "media/cast/transport/cast_transport_defines.h" | |
16 #include "media/cast/video_receiver/video_decoder.h" | |
17 | |
18 namespace { | |
19 const int kMinSchedulingDelayMs = 1; | |
20 } // namespace | |
21 | 15 |
22 namespace media { | 16 namespace media { |
23 namespace cast { | 17 namespace cast { |
24 | 18 |
25 VideoReceiver::VideoReceiver(scoped_refptr<CastEnvironment> cast_environment, | 19 scoped_ptr<CastReceiver> CastReceiver::Create( |
26 const FrameReceiverConfig& video_config, | 20 scoped_refptr<CastEnvironment> cast_environment, |
27 transport::PacedPacketSender* const packet_sender) | 21 const FrameReceiverConfig& audio_config, |
28 : RtpReceiver(cast_environment->Clock(), NULL, &video_config), | 22 const FrameReceiverConfig& video_config, |
29 cast_environment_(cast_environment), | 23 transport::PacketSender* const packet_sender) { |
30 event_subscriber_(kReceiverRtcpEventHistorySize, VIDEO_EVENT), | 24 return scoped_ptr<CastReceiver>(new CastReceiverImpl( |
31 codec_(video_config.codec.video), | 25 cast_environment, audio_config, video_config, packet_sender)); |
32 target_playout_delay_( | |
33 base::TimeDelta::FromMilliseconds(video_config.rtp_max_delay_ms)), | |
34 expected_frame_duration_( | |
35 base::TimeDelta::FromSeconds(1) / video_config.max_frame_rate), | |
36 reports_are_scheduled_(false), | |
37 framer_(cast_environment->Clock(), | |
38 this, | |
39 video_config.incoming_ssrc, | |
40 true, | |
41 video_config.rtp_max_delay_ms * video_config.max_frame_rate / | |
42 1000), | |
43 rtcp_(cast_environment_, | |
44 NULL, | |
45 NULL, | |
46 packet_sender, | |
47 GetStatistics(), | |
48 video_config.rtcp_mode, | |
49 base::TimeDelta::FromMilliseconds(video_config.rtcp_interval), | |
50 video_config.feedback_ssrc, | |
51 video_config.incoming_ssrc, | |
52 video_config.rtcp_c_name, | |
53 false), | |
54 is_waiting_for_consecutive_frame_(false), | |
55 lip_sync_drift_(ClockDriftSmoother::GetDefaultTimeConstant()), | |
56 weak_factory_(this) { | |
57 DCHECK_GT(video_config.rtp_max_delay_ms, 0); | |
58 DCHECK_GT(video_config.max_frame_rate, 0); | |
59 video_decoder_.reset(new VideoDecoder(cast_environment, video_config)); | |
60 decryptor_.Initialize(video_config.aes_key, video_config.aes_iv_mask); | |
61 rtcp_.SetTargetDelay(target_playout_delay_); | |
62 cast_environment_->Logging()->AddRawEventSubscriber(&event_subscriber_); | |
63 memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_)); | |
64 } | 26 } |
65 | 27 |
66 VideoReceiver::~VideoReceiver() { | 28 CastReceiverImpl::CastReceiverImpl( |
67 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | 29 scoped_refptr<CastEnvironment> cast_environment, |
68 cast_environment_->Logging()->RemoveRawEventSubscriber(&event_subscriber_); | 30 const FrameReceiverConfig& audio_config, |
| 31 const FrameReceiverConfig& video_config, |
| 32 transport::PacketSender* const packet_sender) |
| 33 : cast_environment_(cast_environment), |
| 34 pacer_(cast_environment->Clock(), |
| 35 cast_environment->Logging(), |
| 36 packet_sender, |
| 37 cast_environment->GetTaskRunner(CastEnvironment::MAIN)), |
| 38 audio_receiver_(cast_environment, audio_config, AUDIO_EVENT, &pacer_), |
| 39 video_receiver_(cast_environment, video_config, VIDEO_EVENT, &pacer_), |
| 40 ssrc_of_audio_sender_(audio_config.incoming_ssrc), |
| 41 ssrc_of_video_sender_(video_config.incoming_ssrc), |
| 42 num_audio_channels_(audio_config.channels), |
| 43 audio_sampling_rate_(audio_config.frequency), |
| 44 audio_codec_(audio_config.codec.audio), |
| 45 video_codec_(video_config.codec.video) {} |
| 46 |
| 47 CastReceiverImpl::~CastReceiverImpl() {} |
| 48 |
| 49 void CastReceiverImpl::DispatchReceivedPacket(scoped_ptr<Packet> packet) { |
| 50 const uint8* const data = &packet->front(); |
| 51 const size_t length = packet->size(); |
| 52 |
| 53 uint32 ssrc_of_sender; |
| 54 if (Rtcp::IsRtcpPacket(data, length)) { |
| 55 ssrc_of_sender = Rtcp::GetSsrcOfSender(data, length); |
| 56 } else if (!FrameReceiver::ParseSenderSsrc(data, length, &ssrc_of_sender)) { |
| 57 VLOG(1) << "Invalid RTP packet."; |
| 58 return; |
| 59 } |
| 60 |
| 61 base::WeakPtr<FrameReceiver> target; |
| 62 if (ssrc_of_sender == ssrc_of_video_sender_) { |
| 63 target = video_receiver_.AsWeakPtr(); |
| 64 } else if (ssrc_of_sender == ssrc_of_audio_sender_) { |
| 65 target = audio_receiver_.AsWeakPtr(); |
| 66 } else { |
| 67 VLOG(1) << "Dropping packet with a non matching sender SSRC: " |
| 68 << ssrc_of_sender; |
| 69 return; |
| 70 } |
| 71 cast_environment_->PostTask( |
| 72 CastEnvironment::MAIN, |
| 73 FROM_HERE, |
| 74 base::Bind(base::IgnoreResult(&FrameReceiver::ProcessPacket), |
| 75 target, |
| 76 base::Passed(&packet))); |
69 } | 77 } |
70 | 78 |
71 void VideoReceiver::GetRawVideoFrame( | 79 transport::PacketReceiverCallback CastReceiverImpl::packet_receiver() { |
72 const VideoFrameDecodedCallback& callback) { | 80 return base::Bind(&CastReceiverImpl::DispatchReceivedPacket, |
| 81 // TODO(miu): This code structure is dangerous, since the |
| 82 // callback could be stored and then invoked after |
| 83 // destruction of |this|. |
| 84 base::Unretained(this)); |
| 85 } |
| 86 |
| 87 void CastReceiverImpl::RequestDecodedAudioFrame( |
| 88 const AudioFrameDecodedCallback& callback) { |
73 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | 89 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); |
74 DCHECK(!callback.is_null()); | 90 DCHECK(!callback.is_null()); |
75 DCHECK(video_decoder_.get()); | 91 audio_receiver_.RequestEncodedFrame(base::Bind( |
76 GetEncodedVideoFrame(base::Bind( | 92 &CastReceiverImpl::DecodeEncodedAudioFrame, |
77 &VideoReceiver::DecodeEncodedVideoFrame, | |
78 // Note: Use of Unretained is safe since this Closure is guaranteed to be | 93 // Note: Use of Unretained is safe since this Closure is guaranteed to be |
79 // invoked before destruction of |this|. | 94 // invoked or discarded by |audio_receiver_| before destruction of |this|. |
80 base::Unretained(this), | 95 base::Unretained(this), |
81 callback)); | 96 callback)); |
82 } | 97 } |
83 | 98 |
84 void VideoReceiver::DecodeEncodedVideoFrame( | 99 void CastReceiverImpl::RequestEncodedAudioFrame( |
| 100 const ReceiveEncodedFrameCallback& callback) { |
| 101 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); |
| 102 audio_receiver_.RequestEncodedFrame(callback); |
| 103 } |
| 104 |
| 105 void CastReceiverImpl::RequestDecodedVideoFrame( |
| 106 const VideoFrameDecodedCallback& callback) { |
| 107 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); |
| 108 DCHECK(!callback.is_null()); |
| 109 video_receiver_.RequestEncodedFrame(base::Bind( |
| 110 &CastReceiverImpl::DecodeEncodedVideoFrame, |
| 111 // Note: Use of Unretained is safe since this Closure is guaranteed to be |
| 112 // invoked or discarded by |video_receiver_| before destruction of |this|. |
| 113 base::Unretained(this), |
| 114 callback)); |
| 115 } |
| 116 |
| 117 void CastReceiverImpl::RequestEncodedVideoFrame( |
| 118 const ReceiveEncodedFrameCallback& callback) { |
| 119 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); |
| 120 video_receiver_.RequestEncodedFrame(callback); |
| 121 } |
| 122 |
| 123 void CastReceiverImpl::DecodeEncodedAudioFrame( |
| 124 const AudioFrameDecodedCallback& callback, |
| 125 scoped_ptr<transport::EncodedFrame> encoded_frame) { |
| 126 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); |
| 127 if (!encoded_frame) { |
| 128 callback.Run(make_scoped_ptr<AudioBus>(NULL), base::TimeTicks(), false); |
| 129 return; |
| 130 } |
| 131 |
| 132 if (!audio_decoder_) { |
| 133 audio_decoder_.reset(new AudioDecoder(cast_environment_, |
| 134 num_audio_channels_, |
| 135 audio_sampling_rate_, |
| 136 audio_codec_)); |
| 137 } |
| 138 const uint32 frame_id = encoded_frame->frame_id; |
| 139 const uint32 rtp_timestamp = encoded_frame->rtp_timestamp; |
| 140 const base::TimeTicks playout_time = encoded_frame->reference_time; |
| 141 audio_decoder_->DecodeFrame( |
| 142 encoded_frame.Pass(), |
| 143 base::Bind(&CastReceiverImpl::EmitDecodedAudioFrame, |
| 144 cast_environment_, |
| 145 callback, |
| 146 frame_id, |
| 147 rtp_timestamp, |
| 148 playout_time)); |
| 149 } |
| 150 |
| 151 void CastReceiverImpl::DecodeEncodedVideoFrame( |
85 const VideoFrameDecodedCallback& callback, | 152 const VideoFrameDecodedCallback& callback, |
86 scoped_ptr<transport::EncodedFrame> encoded_frame) { | 153 scoped_ptr<transport::EncodedFrame> encoded_frame) { |
87 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | 154 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); |
88 if (!encoded_frame) { | 155 if (!encoded_frame) { |
89 callback.Run( | 156 callback.Run( |
90 make_scoped_refptr<VideoFrame>(NULL), base::TimeTicks(), false); | 157 make_scoped_refptr<VideoFrame>(NULL), base::TimeTicks(), false); |
91 return; | 158 return; |
92 } | 159 } |
| 160 |
| 161 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc |
| 162 TRACE_EVENT_INSTANT2( |
| 163 "cast_perf_test", "PullEncodedVideoFrame", |
| 164 TRACE_EVENT_SCOPE_THREAD, |
| 165 "rtp_timestamp", encoded_frame->rtp_timestamp, |
| 166 "render_time", encoded_frame->reference_time.ToInternalValue()); |
| 167 |
| 168 if (!video_decoder_) |
| 169 video_decoder_.reset(new VideoDecoder(cast_environment_, video_codec_)); |
93 const uint32 frame_id = encoded_frame->frame_id; | 170 const uint32 frame_id = encoded_frame->frame_id; |
94 const uint32 rtp_timestamp = encoded_frame->rtp_timestamp; | 171 const uint32 rtp_timestamp = encoded_frame->rtp_timestamp; |
95 const base::TimeTicks playout_time = encoded_frame->reference_time; | 172 const base::TimeTicks playout_time = encoded_frame->reference_time; |
96 video_decoder_->DecodeFrame(encoded_frame.Pass(), | 173 video_decoder_->DecodeFrame( |
97 base::Bind(&VideoReceiver::EmitRawVideoFrame, | 174 encoded_frame.Pass(), |
98 cast_environment_, | 175 base::Bind(&CastReceiverImpl::EmitDecodedVideoFrame, |
99 callback, | 176 cast_environment_, |
100 frame_id, | 177 callback, |
101 rtp_timestamp, | 178 frame_id, |
102 playout_time)); | 179 rtp_timestamp, |
| 180 playout_time)); |
103 } | 181 } |
104 | 182 |
105 // static | 183 // static |
106 void VideoReceiver::EmitRawVideoFrame( | 184 void CastReceiverImpl::EmitDecodedAudioFrame( |
| 185 const scoped_refptr<CastEnvironment>& cast_environment, |
| 186 const AudioFrameDecodedCallback& callback, |
| 187 uint32 frame_id, |
| 188 uint32 rtp_timestamp, |
| 189 const base::TimeTicks& playout_time, |
| 190 scoped_ptr<AudioBus> audio_bus, |
| 191 bool is_continuous) { |
| 192 DCHECK(cast_environment->CurrentlyOn(CastEnvironment::MAIN)); |
| 193 if (audio_bus.get()) { |
| 194 const base::TimeTicks now = cast_environment->Clock()->NowTicks(); |
| 195 cast_environment->Logging()->InsertFrameEvent( |
| 196 now, FRAME_DECODED, AUDIO_EVENT, rtp_timestamp, frame_id); |
| 197 cast_environment->Logging()->InsertFrameEventWithDelay( |
| 198 now, FRAME_PLAYOUT, AUDIO_EVENT, rtp_timestamp, frame_id, |
| 199 playout_time - now); |
| 200 } |
| 201 callback.Run(audio_bus.Pass(), playout_time, is_continuous); |
| 202 } |
| 203 |
| 204 // static |
| 205 void CastReceiverImpl::EmitDecodedVideoFrame( |
107 const scoped_refptr<CastEnvironment>& cast_environment, | 206 const scoped_refptr<CastEnvironment>& cast_environment, |
108 const VideoFrameDecodedCallback& callback, | 207 const VideoFrameDecodedCallback& callback, |
109 uint32 frame_id, | 208 uint32 frame_id, |
110 uint32 rtp_timestamp, | 209 uint32 rtp_timestamp, |
111 const base::TimeTicks& playout_time, | 210 const base::TimeTicks& playout_time, |
112 const scoped_refptr<VideoFrame>& video_frame, | 211 const scoped_refptr<VideoFrame>& video_frame, |
113 bool is_continuous) { | 212 bool is_continuous) { |
114 DCHECK(cast_environment->CurrentlyOn(CastEnvironment::MAIN)); | 213 DCHECK(cast_environment->CurrentlyOn(CastEnvironment::MAIN)); |
115 if (video_frame) { | 214 if (video_frame) { |
116 const base::TimeTicks now = cast_environment->Clock()->NowTicks(); | 215 const base::TimeTicks now = cast_environment->Clock()->NowTicks(); |
117 cast_environment->Logging()->InsertFrameEvent( | 216 cast_environment->Logging()->InsertFrameEvent( |
118 now, FRAME_DECODED, VIDEO_EVENT, rtp_timestamp, frame_id); | 217 now, FRAME_DECODED, VIDEO_EVENT, rtp_timestamp, frame_id); |
119 cast_environment->Logging()->InsertFrameEventWithDelay( | 218 cast_environment->Logging()->InsertFrameEventWithDelay( |
120 now, FRAME_PLAYOUT, VIDEO_EVENT, rtp_timestamp, frame_id, | 219 now, FRAME_PLAYOUT, VIDEO_EVENT, rtp_timestamp, frame_id, |
121 playout_time - now); | 220 playout_time - now); |
| 221 |
122 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc | 222 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc |
123 TRACE_EVENT_INSTANT1( | 223 TRACE_EVENT_INSTANT1( |
124 "cast_perf_test", "FrameDecoded", | 224 "cast_perf_test", "FrameDecoded", |
125 TRACE_EVENT_SCOPE_THREAD, | 225 TRACE_EVENT_SCOPE_THREAD, |
126 "rtp_timestamp", rtp_timestamp); | 226 "rtp_timestamp", rtp_timestamp); |
127 } | 227 } |
128 callback.Run(video_frame, playout_time, is_continuous); | 228 callback.Run(video_frame, playout_time, is_continuous); |
129 } | 229 } |
130 | 230 |
131 void VideoReceiver::GetEncodedVideoFrame(const FrameEncodedCallback& callback) { | |
132 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | |
133 frame_request_queue_.push_back(callback); | |
134 EmitAvailableEncodedFrames(); | |
135 } | |
136 | |
137 void VideoReceiver::EmitAvailableEncodedFrames() { | |
138 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | |
139 | |
140 while (!frame_request_queue_.empty()) { | |
141 // Attempt to peek at the next completed frame from the |framer_|. | |
142 // TODO(miu): We should only be peeking at the metadata, and not copying the | |
143 // payload yet! Or, at least, peek using a StringPiece instead of a copy. | |
144 scoped_ptr<transport::EncodedFrame> encoded_frame( | |
145 new transport::EncodedFrame()); | |
146 bool is_consecutively_next_frame = false; | |
147 bool have_multiple_complete_frames = false; | |
148 | |
149 if (!framer_.GetEncodedFrame(encoded_frame.get(), | |
150 &is_consecutively_next_frame, | |
151 &have_multiple_complete_frames)) { | |
152 VLOG(1) << "Wait for more video packets to produce a completed frame."; | |
153 return; // OnReceivedPayloadData() will invoke this method in the future. | |
154 } | |
155 | |
156 const base::TimeTicks now = cast_environment_->Clock()->NowTicks(); | |
157 const base::TimeTicks playout_time = | |
158 GetPlayoutTime(encoded_frame->rtp_timestamp); | |
159 | |
160 // If we have multiple decodable frames, and the current frame is | |
161 // too old, then skip it and decode the next frame instead. | |
162 if (have_multiple_complete_frames && now > playout_time) { | |
163 framer_.ReleaseFrame(encoded_frame->frame_id); | |
164 continue; | |
165 } | |
166 | |
167 // If |framer_| has a frame ready that is out of sequence, examine the | |
168 // playout time to determine whether it's acceptable to continue, thereby | |
169 // skipping one or more frames. Skip if the missing frame wouldn't complete | |
170 // playing before the start of playback of the available frame. | |
171 if (!is_consecutively_next_frame) { | |
172 // TODO(miu): Also account for expected decode time here? | |
173 const base::TimeTicks earliest_possible_end_time_of_missing_frame = | |
174 now + expected_frame_duration_; | |
175 if (earliest_possible_end_time_of_missing_frame < playout_time) { | |
176 VLOG(1) << "Wait for next consecutive frame instead of skipping."; | |
177 if (!is_waiting_for_consecutive_frame_) { | |
178 is_waiting_for_consecutive_frame_ = true; | |
179 cast_environment_->PostDelayedTask( | |
180 CastEnvironment::MAIN, | |
181 FROM_HERE, | |
182 base::Bind(&VideoReceiver::EmitAvailableEncodedFramesAfterWaiting, | |
183 weak_factory_.GetWeakPtr()), | |
184 playout_time - now); | |
185 } | |
186 return; | |
187 } | |
188 } | |
189 | |
190 // Decrypt the payload data in the frame, if crypto is being used. | |
191 if (decryptor_.initialized()) { | |
192 std::string decrypted_video_data; | |
193 if (!decryptor_.Decrypt(encoded_frame->frame_id, | |
194 encoded_frame->data, | |
195 &decrypted_video_data)) { | |
196 // Decryption failed. Give up on this frame, releasing it from the | |
197 // jitter buffer. | |
198 framer_.ReleaseFrame(encoded_frame->frame_id); | |
199 continue; | |
200 } | |
201 encoded_frame->data.swap(decrypted_video_data); | |
202 } | |
203 | |
204 // At this point, we have a decrypted EncodedFrame ready to be emitted. | |
205 encoded_frame->reference_time = playout_time; | |
206 framer_.ReleaseFrame(encoded_frame->frame_id); | |
207 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc | |
208 TRACE_EVENT_INSTANT2( | |
209 "cast_perf_test", "PullEncodedVideoFrame", | |
210 TRACE_EVENT_SCOPE_THREAD, | |
211 "rtp_timestamp", encoded_frame->rtp_timestamp, | |
212 // TODO(miu): Need to find an alternative to using ToInternalValue(): | |
213 "render_time", playout_time.ToInternalValue()); | |
214 cast_environment_->PostTask(CastEnvironment::MAIN, | |
215 FROM_HERE, | |
216 base::Bind(frame_request_queue_.front(), | |
217 base::Passed(&encoded_frame))); | |
218 frame_request_queue_.pop_front(); | |
219 } | |
220 } | |
221 | |
222 void VideoReceiver::EmitAvailableEncodedFramesAfterWaiting() { | |
223 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | |
224 DCHECK(is_waiting_for_consecutive_frame_); | |
225 is_waiting_for_consecutive_frame_ = false; | |
226 EmitAvailableEncodedFrames(); | |
227 } | |
228 | |
229 base::TimeTicks VideoReceiver::GetPlayoutTime(uint32 rtp_timestamp) const { | |
230 return lip_sync_reference_time_ + | |
231 lip_sync_drift_.Current() + | |
232 RtpDeltaToTimeDelta( | |
233 static_cast<int32>(rtp_timestamp - lip_sync_rtp_timestamp_), | |
234 kVideoFrequency) + | |
235 target_playout_delay_; | |
236 } | |
237 | |
238 void VideoReceiver::IncomingPacket(scoped_ptr<Packet> packet) { | |
239 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | |
240 if (Rtcp::IsRtcpPacket(&packet->front(), packet->size())) { | |
241 rtcp_.IncomingRtcpPacket(&packet->front(), packet->size()); | |
242 } else { | |
243 ReceivedPacket(&packet->front(), packet->size()); | |
244 } | |
245 if (!reports_are_scheduled_) { | |
246 ScheduleNextRtcpReport(); | |
247 ScheduleNextCastMessage(); | |
248 reports_are_scheduled_ = true; | |
249 } | |
250 } | |
251 | |
252 void VideoReceiver::OnReceivedPayloadData(const uint8* payload_data, | |
253 size_t payload_size, | |
254 const RtpCastHeader& rtp_header) { | |
255 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | |
256 | |
257 const base::TimeTicks now = cast_environment_->Clock()->NowTicks(); | |
258 | |
259 frame_id_to_rtp_timestamp_[rtp_header.frame_id & 0xff] = | |
260 rtp_header.rtp_timestamp; | |
261 cast_environment_->Logging()->InsertPacketEvent( | |
262 now, | |
263 PACKET_RECEIVED, | |
264 VIDEO_EVENT, | |
265 rtp_header.rtp_timestamp, | |
266 rtp_header.frame_id, | |
267 rtp_header.packet_id, | |
268 rtp_header.max_packet_id, | |
269 payload_size); | |
270 | |
271 bool duplicate = false; | |
272 const bool complete = | |
273 framer_.InsertPacket(payload_data, payload_size, rtp_header, &duplicate); | |
274 | |
275 // Duplicate packets are ignored. | |
276 if (duplicate) | |
277 return; | |
278 | |
279 // Update lip-sync values upon receiving the first packet of each frame, or if | |
280 // they have never been set yet. | |
281 if (rtp_header.packet_id == 0 || lip_sync_reference_time_.is_null()) { | |
282 RtpTimestamp fresh_sync_rtp; | |
283 base::TimeTicks fresh_sync_reference; | |
284 if (!rtcp_.GetLatestLipSyncTimes(&fresh_sync_rtp, &fresh_sync_reference)) { | |
285 // HACK: The sender should have provided Sender Reports before the first | |
286 // frame was sent. However, the spec does not currently require this. | |
287 // Therefore, when the data is missing, the local clock is used to | |
288 // generate reference timestamps. | |
289 VLOG(2) << "Lip sync info missing. Falling-back to local clock."; | |
290 fresh_sync_rtp = rtp_header.rtp_timestamp; | |
291 fresh_sync_reference = now; | |
292 } | |
293 // |lip_sync_reference_time_| is always incremented according to the time | |
294 // delta computed from the difference in RTP timestamps. Then, | |
295 // |lip_sync_drift_| accounts for clock drift and also smoothes-out any | |
296 // sudden/discontinuous shifts in the series of reference time values. | |
297 if (lip_sync_reference_time_.is_null()) { | |
298 lip_sync_reference_time_ = fresh_sync_reference; | |
299 } else { | |
300 lip_sync_reference_time_ += RtpDeltaToTimeDelta( | |
301 static_cast<int32>(fresh_sync_rtp - lip_sync_rtp_timestamp_), | |
302 kVideoFrequency); | |
303 } | |
304 lip_sync_rtp_timestamp_ = fresh_sync_rtp; | |
305 lip_sync_drift_.Update( | |
306 now, fresh_sync_reference - lip_sync_reference_time_); | |
307 } | |
308 | |
309 // Video frame not complete; wait for more packets. | |
310 if (!complete) | |
311 return; | |
312 | |
313 EmitAvailableEncodedFrames(); | |
314 } | |
315 | |
316 // Send a cast feedback message. Actual message created in the framer (cast | |
317 // message builder). | |
318 void VideoReceiver::CastFeedback(const RtcpCastMessage& cast_message) { | |
319 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | |
320 | |
321 base::TimeTicks now = cast_environment_->Clock()->NowTicks(); | |
322 RtpTimestamp rtp_timestamp = | |
323 frame_id_to_rtp_timestamp_[cast_message.ack_frame_id_ & 0xff]; | |
324 cast_environment_->Logging()->InsertFrameEvent( | |
325 now, FRAME_ACK_SENT, VIDEO_EVENT, | |
326 rtp_timestamp, cast_message.ack_frame_id_); | |
327 | |
328 ReceiverRtcpEventSubscriber::RtcpEventMultiMap rtcp_events; | |
329 event_subscriber_.GetRtcpEventsAndReset(&rtcp_events); | |
330 rtcp_.SendRtcpFromRtpReceiver(&cast_message, &rtcp_events); | |
331 } | |
332 | |
333 // Cast messages should be sent within a maximum interval. Schedule a call | |
334 // if not triggered elsewhere, e.g. by the cast message_builder. | |
335 void VideoReceiver::ScheduleNextCastMessage() { | |
336 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | |
337 base::TimeTicks send_time; | |
338 framer_.TimeToSendNextCastMessage(&send_time); | |
339 base::TimeDelta time_to_send = | |
340 send_time - cast_environment_->Clock()->NowTicks(); | |
341 time_to_send = std::max( | |
342 time_to_send, base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs)); | |
343 cast_environment_->PostDelayedTask( | |
344 CastEnvironment::MAIN, | |
345 FROM_HERE, | |
346 base::Bind(&VideoReceiver::SendNextCastMessage, | |
347 weak_factory_.GetWeakPtr()), | |
348 time_to_send); | |
349 } | |
350 | |
351 void VideoReceiver::SendNextCastMessage() { | |
352 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | |
353 framer_.SendCastMessage(); // Will only send a message if it is time. | |
354 ScheduleNextCastMessage(); | |
355 } | |
356 | |
357 void VideoReceiver::ScheduleNextRtcpReport() { | |
358 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | |
359 base::TimeDelta time_to_next = rtcp_.TimeToSendNextRtcpReport() - | |
360 cast_environment_->Clock()->NowTicks(); | |
361 | |
362 time_to_next = std::max( | |
363 time_to_next, base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs)); | |
364 | |
365 cast_environment_->PostDelayedTask( | |
366 CastEnvironment::MAIN, | |
367 FROM_HERE, | |
368 base::Bind(&VideoReceiver::SendNextRtcpReport, | |
369 weak_factory_.GetWeakPtr()), | |
370 time_to_next); | |
371 } | |
372 | |
373 void VideoReceiver::SendNextRtcpReport() { | |
374 DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); | |
375 rtcp_.SendRtcpFromRtpReceiver(NULL, NULL); | |
376 ScheduleNextRtcpReport(); | |
377 } | |
378 | |
379 } // namespace cast | 231 } // namespace cast |
380 } // namespace media | 232 } // namespace media |
OLD | NEW |
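
For reviewers skimming the new interface: below is a minimal, illustrative caller sketch assembled only from the entry points visible in the new file above (CastReceiver::Create(), packet_receiver(), RequestDecodedVideoFrame()). The CastEnvironment, FrameReceiverConfig, and transport::PacketSender setup, the OnVideoFrame helper, the exact callback typedefs, and the header paths are assumptions for illustration, not part of this change.

#include "base/bind.h"
#include "media/cast/cast_receiver.h"  // Header path assumed; declares CastReceiver.

// Sketch only: |cast_environment|, |audio_config|, |video_config| and
// |packet_sender| are assumed to have been created elsewhere (not shown).
void OnVideoFrame(const scoped_refptr<media::VideoFrame>& frame,
                  const base::TimeTicks& playout_time,
                  bool is_continuous) {
  // Hand |frame| to the renderer, to be displayed at |playout_time|.
}

void SetUpReceiver(
    const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
    const media::cast::FrameReceiverConfig& audio_config,
    const media::cast::FrameReceiverConfig& video_config,
    media::cast::transport::PacketSender* packet_sender) {
  scoped_ptr<media::cast::CastReceiver> receiver =
      media::cast::CastReceiver::Create(
          cast_environment, audio_config, video_config, packet_sender);

  // Incoming packets are pushed through this callback, which would be handed
  // to the UDP transport's receive path; DispatchReceivedPacket() then demuxes
  // by sender SSRC onto the audio or video FrameReceiver.
  media::cast::transport::PacketReceiverCallback packet_callback =
      receiver->packet_receiver();

  // Request the next decoded video frame; the bound callback is eventually
  // run on the MAIN cast thread via EmitDecodedVideoFrame().
  receiver->RequestDecodedVideoFrame(base::Bind(&OnVideoFrame));
}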