Chromium Code Reviews

Unified Diff: media/cast/receiver/frame_receiver.cc

Issue 308043006: [Cast] Clean-up: Merge RtpReceiver+AudioReceiver+VideoReceiver-->FrameReceiver. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Addressed hclam's comments. Created 6 years, 6 months ago
--- media/cast/audio_receiver/audio_receiver.cc
+++ media/cast/receiver/frame_receiver.cc
 // Copyright 2013 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "media/cast/audio_receiver/audio_receiver.h"
+#include "media/cast/receiver/frame_receiver.h"
 
 #include <algorithm>
 
+#include "base/big_endian.h"
 #include "base/bind.h"
 #include "base/logging.h"
 #include "base/message_loop/message_loop.h"
-#include "media/cast/audio_receiver/audio_decoder.h"
-#include "media/cast/transport/cast_transport_defines.h"
+#include "media/cast/cast_environment.h"
 
 namespace {
 const int kMinSchedulingDelayMs = 1;
 }  // namespace
 
 namespace media {
 namespace cast {
 
-AudioReceiver::AudioReceiver(scoped_refptr<CastEnvironment> cast_environment,
-                             const FrameReceiverConfig& audio_config,
-                             transport::PacedPacketSender* const packet_sender)
-    : RtpReceiver(cast_environment->Clock(), &audio_config, NULL),
-      cast_environment_(cast_environment),
-      event_subscriber_(kReceiverRtcpEventHistorySize, AUDIO_EVENT),
-      codec_(audio_config.codec.audio),
-      frequency_(audio_config.frequency),
+FrameReceiver::FrameReceiver(
+    const scoped_refptr<CastEnvironment>& cast_environment,
+    const FrameReceiverConfig& config,
+    EventMediaType event_media_type,
+    transport::PacedPacketSender* const packet_sender)
+    : cast_environment_(cast_environment),
+      packet_parser_(config.incoming_ssrc, config.rtp_payload_type),
+      stats_(cast_environment->Clock()),
+      event_media_type_(event_media_type),
+      event_subscriber_(kReceiverRtcpEventHistorySize, event_media_type),
+      rtp_timebase_(config.frequency),
       target_playout_delay_(
-          base::TimeDelta::FromMilliseconds(audio_config.rtp_max_delay_ms)),
+          base::TimeDelta::FromMilliseconds(config.rtp_max_delay_ms)),
       expected_frame_duration_(
-          base::TimeDelta::FromSeconds(1) / audio_config.max_frame_rate),
+          base::TimeDelta::FromSeconds(1) / config.max_frame_rate),
       reports_are_scheduled_(false),
       framer_(cast_environment->Clock(),
               this,
-              audio_config.incoming_ssrc,
+              config.incoming_ssrc,
               true,
-              audio_config.rtp_max_delay_ms * audio_config.max_frame_rate /
-                  1000),
-      rtcp_(cast_environment,
+              config.rtp_max_delay_ms * config.max_frame_rate / 1000),
+      rtcp_(cast_environment_,
             NULL,
             NULL,
             packet_sender,
-            GetStatistics(),
-            audio_config.rtcp_mode,
-            base::TimeDelta::FromMilliseconds(audio_config.rtcp_interval),
-            audio_config.feedback_ssrc,
-            audio_config.incoming_ssrc,
-            audio_config.rtcp_c_name,
-            true),
+            &stats_,
+            config.rtcp_mode,
+            base::TimeDelta::FromMilliseconds(config.rtcp_interval),
+            config.feedback_ssrc,
+            config.incoming_ssrc,
+            config.rtcp_c_name,
+            event_media_type),
       is_waiting_for_consecutive_frame_(false),
       lip_sync_drift_(ClockDriftSmoother::GetDefaultTimeConstant()),
       weak_factory_(this) {
-  DCHECK_GT(audio_config.rtp_max_delay_ms, 0);
-  DCHECK_GT(audio_config.max_frame_rate, 0);
-  audio_decoder_.reset(new AudioDecoder(cast_environment, audio_config));
-  decryptor_.Initialize(audio_config.aes_key, audio_config.aes_iv_mask);
+  DCHECK_GT(config.rtp_max_delay_ms, 0);
+  DCHECK_GT(config.max_frame_rate, 0);
+  decryptor_.Initialize(config.aes_key, config.aes_iv_mask);
   rtcp_.SetTargetDelay(target_playout_delay_);
   cast_environment_->Logging()->AddRawEventSubscriber(&event_subscriber_);
   memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_));
 }
 
-AudioReceiver::~AudioReceiver() {
+FrameReceiver::~FrameReceiver() {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
   cast_environment_->Logging()->RemoveRawEventSubscriber(&event_subscriber_);
 }
 
-void AudioReceiver::OnReceivedPayloadData(const uint8* payload_data,
-                                          size_t payload_size,
-                                          const RtpCastHeader& rtp_header) {
+void FrameReceiver::RequestEncodedFrame(
+    const ReceiveEncodedFrameCallback& callback) {
+  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+  frame_request_queue_.push_back(callback);
+  EmitAvailableEncodedFrames();
+}
+
+bool FrameReceiver::ProcessPacket(scoped_ptr<Packet> packet) {
+  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+
+  if (Rtcp::IsRtcpPacket(&packet->front(), packet->size())) {
+    rtcp_.IncomingRtcpPacket(&packet->front(), packet->size());
+  } else {
+    RtpCastHeader rtp_header;
+    const uint8* payload_data;
+    size_t payload_size;
+    if (!packet_parser_.ParsePacket(&packet->front(),
+                                    packet->size(),
+                                    &rtp_header,
+                                    &payload_data,
+                                    &payload_size)) {
+      return false;
+    }
+
+    ProcessParsedPacket(rtp_header, payload_data, payload_size);
+    stats_.UpdateStatistics(rtp_header);
+  }
+
+  if (!reports_are_scheduled_) {
+    ScheduleNextRtcpReport();
+    ScheduleNextCastMessage();
+    reports_are_scheduled_ = true;
+  }
+
+  return true;
+}
+
+// static
+bool FrameReceiver::ParseSenderSsrc(const uint8* packet,
+                                    size_t length,
+                                    uint32* ssrc) {
+  base::BigEndianReader big_endian_reader(
+      reinterpret_cast<const char*>(packet), length);
+  return big_endian_reader.Skip(8) && big_endian_reader.ReadU32(ssrc);
+}
+
+void FrameReceiver::ProcessParsedPacket(const RtpCastHeader& rtp_header,
+                                        const uint8* payload_data,
+                                        size_t payload_size) {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
 
   const base::TimeTicks now = cast_environment_->Clock()->NowTicks();
 
   frame_id_to_rtp_timestamp_[rtp_header.frame_id & 0xff] =
       rtp_header.rtp_timestamp;
   cast_environment_->Logging()->InsertPacketEvent(
-      now, PACKET_RECEIVED, AUDIO_EVENT, rtp_header.rtp_timestamp,
+      now, PACKET_RECEIVED, event_media_type_, rtp_header.rtp_timestamp,
       rtp_header.frame_id, rtp_header.packet_id, rtp_header.max_packet_id,
       payload_size);
 
   bool duplicate = false;
   const bool complete =
       framer_.InsertPacket(payload_data, payload_size, rtp_header, &duplicate);
 
   // Duplicate packets are ignored.
   if (duplicate)
     return;
(...skipping 14 matching lines...)
     }
     // |lip_sync_reference_time_| is always incremented according to the time
     // delta computed from the difference in RTP timestamps. Then,
     // |lip_sync_drift_| accounts for clock drift and also smoothes-out any
     // sudden/discontinuous shifts in the series of reference time values.
     if (lip_sync_reference_time_.is_null()) {
       lip_sync_reference_time_ = fresh_sync_reference;
     } else {
       lip_sync_reference_time_ += RtpDeltaToTimeDelta(
           static_cast<int32>(fresh_sync_rtp - lip_sync_rtp_timestamp_),
-          frequency_);
+          rtp_timebase_);
     }
     lip_sync_rtp_timestamp_ = fresh_sync_rtp;
     lip_sync_drift_.Update(
         now, fresh_sync_reference - lip_sync_reference_time_);
   }
 
-  // Frame not complete; wait for more packets.
-  if (!complete)
-    return;
-
-  EmitAvailableEncodedFrames();
+  // Another frame is complete from a non-duplicate packet. Attempt to emit
+  // more frames to satisfy enqueued requests.
+  if (complete)
+    EmitAvailableEncodedFrames();
 }
 
-void AudioReceiver::GetRawAudioFrame(
-    const AudioFrameDecodedCallback& callback) {
+void FrameReceiver::CastFeedback(const RtcpCastMessage& cast_message) {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-  DCHECK(!callback.is_null());
-  DCHECK(audio_decoder_.get());
-  GetEncodedAudioFrame(base::Bind(
-      &AudioReceiver::DecodeEncodedAudioFrame,
-      // Note: Use of Unretained is safe since this Closure is guaranteed to be
-      // invoked before destruction of |this|.
-      base::Unretained(this),
-      callback));
+
+  base::TimeTicks now = cast_environment_->Clock()->NowTicks();
+  RtpTimestamp rtp_timestamp =
+      frame_id_to_rtp_timestamp_[cast_message.ack_frame_id_ & 0xff];
+  cast_environment_->Logging()->InsertFrameEvent(
+      now, FRAME_ACK_SENT, event_media_type_,
+      rtp_timestamp, cast_message.ack_frame_id_);
+
+  ReceiverRtcpEventSubscriber::RtcpEventMultiMap rtcp_events;
+  event_subscriber_.GetRtcpEventsAndReset(&rtcp_events);
+  rtcp_.SendRtcpFromRtpReceiver(&cast_message, &rtcp_events);
 }
 
-void AudioReceiver::DecodeEncodedAudioFrame(
-    const AudioFrameDecodedCallback& callback,
-    scoped_ptr<transport::EncodedFrame> encoded_frame) {
-  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-  if (!encoded_frame) {
-    callback.Run(make_scoped_ptr<AudioBus>(NULL), base::TimeTicks(), false);
-    return;
-  }
-  const uint32 frame_id = encoded_frame->frame_id;
-  const uint32 rtp_timestamp = encoded_frame->rtp_timestamp;
-  const base::TimeTicks playout_time = encoded_frame->reference_time;
-  audio_decoder_->DecodeFrame(encoded_frame.Pass(),
-                              base::Bind(&AudioReceiver::EmitRawAudioFrame,
-                                         cast_environment_,
-                                         callback,
-                                         frame_id,
-                                         rtp_timestamp,
-                                         playout_time));
-}
-
-// static
-void AudioReceiver::EmitRawAudioFrame(
-    const scoped_refptr<CastEnvironment>& cast_environment,
-    const AudioFrameDecodedCallback& callback,
-    uint32 frame_id,
-    uint32 rtp_timestamp,
-    const base::TimeTicks& playout_time,
-    scoped_ptr<AudioBus> audio_bus,
-    bool is_continuous) {
-  DCHECK(cast_environment->CurrentlyOn(CastEnvironment::MAIN));
-  if (audio_bus.get()) {
-    const base::TimeTicks now = cast_environment->Clock()->NowTicks();
-    cast_environment->Logging()->InsertFrameEvent(
-        now, FRAME_DECODED, AUDIO_EVENT, rtp_timestamp, frame_id);
-    cast_environment->Logging()->InsertFrameEventWithDelay(
-        now, FRAME_PLAYOUT, AUDIO_EVENT, rtp_timestamp, frame_id,
-        playout_time - now);
-  }
-  callback.Run(audio_bus.Pass(), playout_time, is_continuous);
-}
-
-void AudioReceiver::GetEncodedAudioFrame(const FrameEncodedCallback& callback) {
-  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-  frame_request_queue_.push_back(callback);
-  EmitAvailableEncodedFrames();
-}
-
-void AudioReceiver::EmitAvailableEncodedFrames() {
+void FrameReceiver::EmitAvailableEncodedFrames() {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
 
   while (!frame_request_queue_.empty()) {
     // Attempt to peek at the next completed frame from the |framer_|.
     // TODO(miu): We should only be peeking at the metadata, and not copying the
     // payload yet! Or, at least, peek using a StringPiece instead of a copy.
     scoped_ptr<transport::EncodedFrame> encoded_frame(
         new transport::EncodedFrame());
     bool is_consecutively_next_frame = false;
     bool have_multiple_complete_frames = false;
     if (!framer_.GetEncodedFrame(encoded_frame.get(),
                                  &is_consecutively_next_frame,
                                  &have_multiple_complete_frames)) {
-      VLOG(1) << "Wait for more audio packets to produce a completed frame.";
-      return;  // OnReceivedPayloadData() will invoke this method in the future.
+      VLOG(1) << "Wait for more packets to produce a completed frame.";
+      return;  // ProcessParsedPacket() will invoke this method in the future.
     }
 
     const base::TimeTicks now = cast_environment_->Clock()->NowTicks();
     const base::TimeTicks playout_time =
         GetPlayoutTime(encoded_frame->rtp_timestamp);
 
     // If we have multiple decodable frames, and the current frame is
     // too old, then skip it and decode the next frame instead.
     if (have_multiple_complete_frames && now > playout_time) {
       framer_.ReleaseFrame(encoded_frame->frame_id);
       continue;
     }
 
     // If |framer_| has a frame ready that is out of sequence, examine the
     // playout time to determine whether it's acceptable to continue, thereby
     // skipping one or more frames. Skip if the missing frame wouldn't complete
     // playing before the start of playback of the available frame.
     if (!is_consecutively_next_frame) {
       // TODO(miu): Also account for expected decode time here?
       const base::TimeTicks earliest_possible_end_time_of_missing_frame =
           now + expected_frame_duration_;
       if (earliest_possible_end_time_of_missing_frame < playout_time) {
         VLOG(1) << "Wait for next consecutive frame instead of skipping.";
         if (!is_waiting_for_consecutive_frame_) {
           is_waiting_for_consecutive_frame_ = true;
           cast_environment_->PostDelayedTask(
               CastEnvironment::MAIN,
               FROM_HERE,
-              base::Bind(&AudioReceiver::EmitAvailableEncodedFramesAfterWaiting,
+              base::Bind(&FrameReceiver::EmitAvailableEncodedFramesAfterWaiting,
                          weak_factory_.GetWeakPtr()),
               playout_time - now);
         }
         return;
       }
     }
 
     // Decrypt the payload data in the frame, if crypto is being used.
     if (decryptor_.initialized()) {
-      std::string decrypted_audio_data;
+      std::string decrypted_data;
       if (!decryptor_.Decrypt(encoded_frame->frame_id,
                               encoded_frame->data,
-                              &decrypted_audio_data)) {
-        // Decryption failed.  Give up on this frame, releasing it from the
-        // jitter buffer.
+                              &decrypted_data)) {
+        // Decryption failed.  Give up on this frame.
         framer_.ReleaseFrame(encoded_frame->frame_id);
         continue;
       }
-      encoded_frame->data.swap(decrypted_audio_data);
+      encoded_frame->data.swap(decrypted_data);
     }
 
     // At this point, we have a decrypted EncodedFrame ready to be emitted.
     encoded_frame->reference_time = playout_time;
     framer_.ReleaseFrame(encoded_frame->frame_id);
     cast_environment_->PostTask(CastEnvironment::MAIN,
                                 FROM_HERE,
                                 base::Bind(frame_request_queue_.front(),
                                            base::Passed(&encoded_frame)));
     frame_request_queue_.pop_front();
   }
 }
 
-void AudioReceiver::EmitAvailableEncodedFramesAfterWaiting() {
+void FrameReceiver::EmitAvailableEncodedFramesAfterWaiting() {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
   DCHECK(is_waiting_for_consecutive_frame_);
   is_waiting_for_consecutive_frame_ = false;
   EmitAvailableEncodedFrames();
 }
 
-base::TimeTicks AudioReceiver::GetPlayoutTime(uint32 rtp_timestamp) const {
+base::TimeTicks FrameReceiver::GetPlayoutTime(uint32 rtp_timestamp) const {
   return lip_sync_reference_time_ +
       lip_sync_drift_.Current() +
       RtpDeltaToTimeDelta(
           static_cast<int32>(rtp_timestamp - lip_sync_rtp_timestamp_),
-          frequency_) +
+          rtp_timebase_) +
       target_playout_delay_;
 }
 
-void AudioReceiver::IncomingPacket(scoped_ptr<Packet> packet) {
-  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-  if (Rtcp::IsRtcpPacket(&packet->front(), packet->size())) {
-    rtcp_.IncomingRtcpPacket(&packet->front(), packet->size());
-  } else {
-    ReceivedPacket(&packet->front(), packet->size());
-  }
-  if (!reports_are_scheduled_) {
-    ScheduleNextRtcpReport();
-    ScheduleNextCastMessage();
-    reports_are_scheduled_ = true;
-  }
-}
-
-void AudioReceiver::CastFeedback(const RtcpCastMessage& cast_message) {
-  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-  base::TimeTicks now = cast_environment_->Clock()->NowTicks();
-  RtpTimestamp rtp_timestamp =
-      frame_id_to_rtp_timestamp_[cast_message.ack_frame_id_ & 0xff];
-  cast_environment_->Logging()->InsertFrameEvent(
-      now, FRAME_ACK_SENT, AUDIO_EVENT, rtp_timestamp,
-      cast_message.ack_frame_id_);
-
-  ReceiverRtcpEventSubscriber::RtcpEventMultiMap rtcp_events;
-  event_subscriber_.GetRtcpEventsAndReset(&rtcp_events);
-  rtcp_.SendRtcpFromRtpReceiver(&cast_message, &rtcp_events);
-}
-
-void AudioReceiver::ScheduleNextRtcpReport() {
-  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-  base::TimeDelta time_to_send = rtcp_.TimeToSendNextRtcpReport() -
-      cast_environment_->Clock()->NowTicks();
-
-  time_to_send = std::max(
-      time_to_send, base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
-
-  cast_environment_->PostDelayedTask(
-      CastEnvironment::MAIN,
-      FROM_HERE,
-      base::Bind(&AudioReceiver::SendNextRtcpReport,
-                 weak_factory_.GetWeakPtr()),
-      time_to_send);
-}
-
-void AudioReceiver::SendNextRtcpReport() {
-  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
-  // TODO(pwestin): add logging.
-  rtcp_.SendRtcpFromRtpReceiver(NULL, NULL);
-  ScheduleNextRtcpReport();
-}
-
-// Cast messages should be sent within a maximum interval. Schedule a call
-// if not triggered elsewhere, e.g. by the cast message_builder.
-void AudioReceiver::ScheduleNextCastMessage() {
+void FrameReceiver::ScheduleNextCastMessage() {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
   base::TimeTicks send_time;
   framer_.TimeToSendNextCastMessage(&send_time);
   base::TimeDelta time_to_send =
       send_time - cast_environment_->Clock()->NowTicks();
   time_to_send = std::max(
       time_to_send, base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
   cast_environment_->PostDelayedTask(
       CastEnvironment::MAIN,
       FROM_HERE,
-      base::Bind(&AudioReceiver::SendNextCastMessage,
+      base::Bind(&FrameReceiver::SendNextCastMessage,
                  weak_factory_.GetWeakPtr()),
      time_to_send);
 }
 
-void AudioReceiver::SendNextCastMessage() {
+void FrameReceiver::SendNextCastMessage() {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
   framer_.SendCastMessage();  // Will only send a message if it is time.
   ScheduleNextCastMessage();
 }
 
+void FrameReceiver::ScheduleNextRtcpReport() {
+  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+  base::TimeDelta time_to_next = rtcp_.TimeToSendNextRtcpReport() -
+      cast_environment_->Clock()->NowTicks();
+
+  time_to_next = std::max(
+      time_to_next, base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+
+  cast_environment_->PostDelayedTask(
+      CastEnvironment::MAIN,
+      FROM_HERE,
+      base::Bind(&FrameReceiver::SendNextRtcpReport,
+                 weak_factory_.GetWeakPtr()),
+      time_to_next);
+}
+
+void FrameReceiver::SendNextRtcpReport() {
+  DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+  rtcp_.SendRtcpFromRtpReceiver(NULL, NULL);
+  ScheduleNextRtcpReport();
+}
+
 }  // namespace cast
 }  // namespace media
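
For reference, below is a minimal usage sketch of the merged FrameReceiver, based only on the public entry points visible in this patch (the constructor, ProcessPacket(), and RequestEncodedFrame()). It is illustrative and not part of the change: RunReceiverExample() and OnEncodedFrame() are hypothetical helpers, and the sketch assumes ReceiveEncodedFrameCallback delivers a scoped_ptr<transport::EncodedFrame>, as suggested by the base::Passed() call in EmitAvailableEncodedFrames().

// Illustrative sketch only -- not part of this patch.  Shows the expected
// call pattern for the merged FrameReceiver: construct it with a config and
// an EventMediaType, enqueue a frame request, and feed raw packets to it.
#include "base/bind.h"
#include "base/logging.h"
#include "media/cast/receiver/frame_receiver.h"

namespace media {
namespace cast {

// Hypothetical handler; assumes ReceiveEncodedFrameCallback passes a
// scoped_ptr<transport::EncodedFrame> whose reference_time has been set to
// the playout time computed by GetPlayoutTime().
void OnEncodedFrame(scoped_ptr<transport::EncodedFrame> frame) {
  if (!frame)
    return;
  VLOG(1) << "Frame " << frame->frame_id << " ready for playout.";
}

// Hypothetical glue function; the caller supplies the environment, config,
// and paced sender that the constructor in this patch requires.
void RunReceiverExample(
    const scoped_refptr<CastEnvironment>& cast_environment,
    const FrameReceiverConfig& audio_config,
    transport::PacedPacketSender* packet_sender,
    scoped_ptr<Packet> packet) {
  // AUDIO_EVENT vs. VIDEO_EVENT selects how logging events are tagged; this
  // argument replaces the hard-coded AUDIO_EVENT of the old AudioReceiver.
  FrameReceiver receiver(
      cast_environment, audio_config, AUDIO_EVENT, packet_sender);

  // Queue a request; the callback is posted once a complete (and, if crypto
  // is configured, decrypted) frame can be released in playout order.
  receiver.RequestEncodedFrame(base::Bind(&OnEncodedFrame));

  // Every incoming packet goes through ProcessPacket(), which dispatches
  // RTCP internally and pushes RTP payloads into the framer/jitter buffer.
  if (!receiver.ProcessPacket(packet.Pass()))
    VLOG(1) << "Packet was not a valid RTCP or Cast RTP packet.";
}

}  // namespace cast
}  // namespace media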