OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <algorithm> | 5 #include <algorithm> |
6 #include <climits> | 6 #include <climits> |
7 #include <cstdarg> | 7 #include <cstdarg> |
8 #include <cstdio> | 8 #include <cstdio> |
| 9 #include <deque> |
9 #include <string> | 10 #include <string> |
| 11 #include <utility> |
10 | 12 |
11 #include "base/at_exit.h" | 13 #include "base/at_exit.h" |
12 #include "base/command_line.h" | 14 #include "base/command_line.h" |
13 #include "base/logging.h" | 15 #include "base/logging.h" |
14 #include "base/memory/ref_counted.h" | 16 #include "base/memory/ref_counted.h" |
15 #include "base/memory/scoped_ptr.h" | 17 #include "base/memory/scoped_ptr.h" |
16 #include "base/message_loop/message_loop.h" | 18 #include "base/message_loop/message_loop.h" |
| 19 #include "base/synchronization/lock.h" |
| 20 #include "base/synchronization/waitable_event.h" |
17 #include "base/threading/thread.h" | 21 #include "base/threading/thread.h" |
18 #include "base/time/default_tick_clock.h" | 22 #include "base/time/default_tick_clock.h" |
| 23 #include "base/timer/timer.h" |
| 24 #include "media/audio/audio_io.h" |
| 25 #include "media/audio/audio_manager.h" |
| 26 #include "media/audio/audio_parameters.h" |
| 27 #include "media/audio/fake_audio_log_factory.h" |
19 #include "media/base/audio_bus.h" | 28 #include "media/base/audio_bus.h" |
| 29 #include "media/base/channel_layout.h" |
20 #include "media/base/video_frame.h" | 30 #include "media/base/video_frame.h" |
21 #include "media/cast/cast_config.h" | 31 #include "media/cast/cast_config.h" |
22 #include "media/cast/cast_environment.h" | 32 #include "media/cast/cast_environment.h" |
23 #include "media/cast/cast_receiver.h" | 33 #include "media/cast/cast_receiver.h" |
24 #include "media/cast/logging/logging_defines.h" | 34 #include "media/cast/logging/logging_defines.h" |
25 #include "media/cast/test/utility/default_config.h" | 35 #include "media/cast/test/utility/default_config.h" |
26 #include "media/cast/test/utility/in_process_receiver.h" | 36 #include "media/cast/test/utility/in_process_receiver.h" |
27 #include "media/cast/test/utility/input_builder.h" | 37 #include "media/cast/test/utility/input_builder.h" |
28 #include "media/cast/test/utility/standalone_cast_environment.h" | 38 #include "media/cast/test/utility/standalone_cast_environment.h" |
29 #include "media/cast/transport/transport/udp_transport.h" | 39 #include "media/cast/transport/transport/udp_transport.h" |
(...skipping 57 matching lines...)
87 test::InputBuilder input_tx( | 97 test::InputBuilder input_tx( |
88 "Choose video sender SSRC.", DEFAULT_VIDEO_FEEDBACK_SSRC, 1, INT_MAX); | 98 "Choose video sender SSRC.", DEFAULT_VIDEO_FEEDBACK_SSRC, 1, INT_MAX); |
89 video_config->feedback_ssrc = input_tx.GetIntInput(); | 99 video_config->feedback_ssrc = input_tx.GetIntInput(); |
90 | 100 |
91 test::InputBuilder input_rx( | 101 test::InputBuilder input_rx( |
92 "Choose video receiver SSRC.", DEFAULT_VIDEO_INCOMING_SSRC, 1, INT_MAX); | 102 "Choose video receiver SSRC.", DEFAULT_VIDEO_INCOMING_SSRC, 1, INT_MAX); |
93 video_config->incoming_ssrc = input_rx.GetIntInput(); | 103 video_config->incoming_ssrc = input_rx.GetIntInput(); |
94 } | 104 } |
95 | 105 |
96 #if defined(OS_LINUX) | 106 #if defined(OS_LINUX) |
97 void GetResolution(int* height, int* width) { | 107 void GetWindowSize(int* width, int* height) { |
98 // Resolution values based on sender settings | 108 // Resolution values based on sender settings |
99 test::InputBuilder input_h( | 109 test::InputBuilder input_w( |
100 "Choose video height.", kVideoWindowHeight, 176, 1080); | 110 "Choose window width.", kVideoWindowWidth, 144, 1920); |
| 111 *width = input_w.GetIntInput(); |
| 112 |
| 113 test::InputBuilder input_h( |
| 114 "Choose window height.", kVideoWindowHeight, 176, 1080); |
101 *height = input_h.GetIntInput(); | 115 *height = input_h.GetIntInput(); |
102 | |
103 test::InputBuilder input_w( | |
104 "Choose video width.", kVideoWindowWidth, 144, 1920); | |
105 *width = input_w.GetIntInput(); | |
106 } | 116 } |
107 #endif // OS_LINUX | 117 #endif // OS_LINUX |
108 | 118 |
109 void GetPayloadtype(AudioReceiverConfig* audio_config) { | 119 void GetPayloadtype(AudioReceiverConfig* audio_config) { |
110 test::InputBuilder input("Choose audio receiver payload type.", | 120 test::InputBuilder input("Choose audio receiver payload type.", |
111 DEFAULT_AUDIO_PAYLOAD_TYPE, | 121 DEFAULT_AUDIO_PAYLOAD_TYPE, |
112 96, | 122 96, |
113 127); | 123 127); |
114 audio_config->rtp_payload_type = input.GetIntInput(); | 124 audio_config->rtp_payload_type = input.GetIntInput(); |
115 } | 125 } |
(...skipping 13 matching lines...)
129 video_config->rtp_payload_type = input.GetIntInput(); | 139 video_config->rtp_payload_type = input.GetIntInput(); |
130 } | 140 } |
131 | 141 |
132 VideoReceiverConfig GetVideoReceiverConfig() { | 142 VideoReceiverConfig GetVideoReceiverConfig() { |
133 VideoReceiverConfig video_config = GetDefaultVideoReceiverConfig(); | 143 VideoReceiverConfig video_config = GetDefaultVideoReceiverConfig(); |
134 GetSsrcs(&video_config); | 144 GetSsrcs(&video_config); |
135 GetPayloadtype(&video_config); | 145 GetPayloadtype(&video_config); |
136 return video_config; | 146 return video_config; |
137 } | 147 } |
138 | 148 |
139 // An InProcessReceiver that renders video frames to a LinuxOutputWindow. While | 149 AudioParameters ToAudioParameters(const AudioReceiverConfig& config) { |
140 // it does receive audio frames, it does not play them. | 150 const int samples_in_10ms = config.frequency / 100; |
141 class ReceiverDisplay : public InProcessReceiver { | 151 return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY, |
| 152 GuessChannelLayout(config.channels), |
| 153 config.frequency, 32, samples_in_10ms); |
| 154 } |
| 155 |
| 156 // An InProcessReceiver that renders video frames to a LinuxOutputWindow and |
| 157 // audio frames via Chromium's audio stack. |
| 158 // |
| 159 // InProcessReceiver pushes audio and video frames to this subclass, and these |
| 160 // frames are pushed into a queue. Then, for audio, the Chromium audio stack |
| 161 // will make polling calls on a separate, unknown thread whereby audio frames |
| 162 // are pulled out of the audio queue as needed. For video, however, NaivePlayer |
| 163 // is responsible for scheduling updates to the screen itself. For both, the |
| 164 // queues are pruned (i.e., received frames are skipped) when the system is not |
| 165 // able to play back as fast as frames are entering the queue. |
| 166 // |
| 167 // This is NOT a good reference implementation for a Cast receiver player since: |
| 168 // 1. It only skips frames to handle slower-than-expected playout, or halts |
| 169 // playback to handle frame underruns. |
| 170 // 2. It makes no attempt to synchronize the timing of playout of the video |
| 171 // frames with the audio frames. |
| 172 // 3. It does nothing to smooth or hide discontinuities in playback due to |
| 173 // timing issues or missing frames. |
| 174 class NaivePlayer : public InProcessReceiver, |
| 175 public AudioOutputStream::AudioSourceCallback { |
142 public: | 176 public: |
143 ReceiverDisplay(const scoped_refptr<CastEnvironment>& cast_environment, | 177 NaivePlayer(const scoped_refptr<CastEnvironment>& cast_environment, |
144 const net::IPEndPoint& local_end_point, | 178 const net::IPEndPoint& local_end_point, |
145 const net::IPEndPoint& remote_end_point, | 179 const net::IPEndPoint& remote_end_point, |
146 const AudioReceiverConfig& audio_config, | 180 const AudioReceiverConfig& audio_config, |
147 const VideoReceiverConfig& video_config, | 181 const VideoReceiverConfig& video_config, |
148 int width, | 182 int window_width, |
149 int height) | 183 int window_height) |
150 : InProcessReceiver(cast_environment, | 184 : InProcessReceiver(cast_environment, |
151 local_end_point, | 185 local_end_point, |
152 remote_end_point, | 186 remote_end_point, |
153 audio_config, | 187 audio_config, |
154 video_config), | 188 video_config), |
| 189 // Maximum age is the duration of 3 video frames. 3 was chosen |
| 190 // arbitrarily, but seems to work well. |
| 191 max_frame_age_(base::TimeDelta::FromSeconds(1) * 3 / |
| 192 video_config.max_frame_rate), |
155 #if defined(OS_LINUX) | 193 #if defined(OS_LINUX) |
156 render_(0, 0, width, height, "Cast_receiver"), | 194 render_(0, 0, window_width, window_height, "Cast_receiver"), |
157 #endif // OS_LINUX | 195 #endif // OS_LINUX |
158 last_playout_time_(), | 196 num_video_frames_processed_(0), |
159 last_render_time_() { | 197 num_audio_frames_processed_(0), |
160 } | 198 currently_playing_audio_frame_start_(-1) {} |
161 | 199 |
162 virtual ~ReceiverDisplay() {} | 200 virtual ~NaivePlayer() {} |
163 | 201 |
164 protected: | 202 virtual void Start() OVERRIDE { |
165 virtual void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame, | 203 AudioManager::Get()->GetTaskRunner()->PostTask( |
166 const base::TimeTicks& render_time, | 204 FROM_HERE, |
| 205 base::Bind(&NaivePlayer::StartAudioOutputOnAudioManagerThread, |
| 206 base::Unretained(this))); |
| 207 // Note: No need to wait for audio polling to start since the push-and-pull |
| 208 // mechanism is synchronized via the |audio_playout_queue_|. |
| 209 InProcessReceiver::Start(); |
| 210 } |
| 211 |
| 212 virtual void Stop() OVERRIDE { |
| 213 // First, stop audio output to the Chromium audio stack. |
| 214 base::WaitableEvent done(false, false); |
| 215 DCHECK(!AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread()); |
| 216 AudioManager::Get()->GetTaskRunner()->PostTask( |
| 217 FROM_HERE, |
| 218 base::Bind(&NaivePlayer::StopAudioOutputOnAudioManagerThread, |
| 219 base::Unretained(this), |
| 220 &done)); |
| 221 done.Wait(); |
| 222 |
| 223 // Now, stop receiving new frames. |
| 224 InProcessReceiver::Stop(); |
| 225 |
| 226 // Finally, clear out any frames remaining in the queues. |
| 227 while (!audio_playout_queue_.empty()) { |
| 228 const scoped_ptr<AudioBus> to_be_deleted( |
| 229 audio_playout_queue_.front().second); |
| 230 audio_playout_queue_.pop_front(); |
| 231 } |
| 232 video_playout_queue_.clear(); |
| 233 } |
| 234 |
| 235 private: |
| 236 void StartAudioOutputOnAudioManagerThread() { |
| 237 DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread()); |
| 238 DCHECK(!audio_output_stream_); |
| 239 audio_output_stream_.reset(AudioManager::Get()->MakeAudioOutputStreamProxy( |
| 240 ToAudioParameters(audio_config()), "")); |
| 241 if (audio_output_stream_.get() && audio_output_stream_->Open()) { |
| 242 audio_output_stream_->Start(this); |
| 243 } else { |
| 244 LOG(ERROR) << "Failed to open an audio output stream. " |
| 245 << "Audio playback disabled."; |
| 246 audio_output_stream_.reset(); |
| 247 } |
| 248 } |
| 249 |
| 250 void StopAudioOutputOnAudioManagerThread(base::WaitableEvent* done) { |
| 251 DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread()); |
| 252 if (audio_output_stream_.get()) { |
| 253 audio_output_stream_->Stop(); |
| 254 audio_output_stream_->Close(); |
| 255 audio_output_stream_.reset(); |
| 256 } |
| 257 done->Signal(); |
| 258 } |
| 259 |
| 260 //////////////////////////////////////////////////////////////////// |
| 261 // InProcessReceiver overrides. |
| 262 |
| 263 virtual void OnVideoFrame(const scoped_refptr<VideoFrame>& video_frame, |
| 264 const base::TimeTicks& playout_time, |
167 bool is_continuous) OVERRIDE { | 265 bool is_continuous) OVERRIDE { |
168 #ifdef OS_LINUX | 266 DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN)); |
169 render_.RenderFrame(video_frame); | 267 LOG_IF(WARNING, !is_continuous) |
170 #endif // OS_LINUX | 268 << "Video: Discontinuity in received frames."; |
171 // Print out the delta between frames. | 269 video_playout_queue_.push_back(std::make_pair(playout_time, video_frame)); |
172 if (!last_render_time_.is_null()) { | 270 ScheduleVideoPlayout(); |
173 base::TimeDelta time_diff = render_time - last_render_time_; | |
174 VLOG(2) << "Size = " << video_frame->coded_size().ToString() | |
175 << "; RenderDelay[mS] = " << time_diff.InMilliseconds(); | |
176 } | |
177 last_render_time_ = render_time; | |
178 } | 271 } |
179 | 272 |
180 virtual void OnAudioFrame(scoped_ptr<AudioBus> audio_frame, | 273 virtual void OnAudioFrame(scoped_ptr<AudioBus> audio_frame, |
181 const base::TimeTicks& playout_time, | 274 const base::TimeTicks& playout_time, |
182 bool is_continuous) OVERRIDE { | 275 bool is_continuous) OVERRIDE { |
183 // For audio just print the playout delta between audio frames. | 276 DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN)); |
184 if (!last_playout_time_.is_null()) { | 277 LOG_IF(WARNING, !is_continuous) |
185 base::TimeDelta time_diff = playout_time - last_playout_time_; | 278 << "Audio: Discontinuity in received frames."; |
186 VLOG(2) << "SampleRate = " << audio_config().frequency | 279 base::AutoLock auto_lock(audio_lock_); |
187 << "; PlayoutDelay[mS] = " << time_diff.InMilliseconds(); | 280 audio_playout_queue_.push_back( |
188 } | 281 std::make_pair(playout_time, audio_frame.release())); |
189 last_playout_time_ = playout_time; | 282 } |
190 } | 283 |
191 | 284 // End of InProcessReceiver overrides. |
| 285 //////////////////////////////////////////////////////////////////// |
| 286 |
| 287 //////////////////////////////////////////////////////////////////// |
| 288 // AudioSourceCallback implementation. |
| 289 |
| 290 virtual int OnMoreData(AudioBus* dest, AudioBuffersState buffers_state) |
| 291 OVERRIDE { |
| 292 // Note: This method is being invoked by a separate thread unknown to us |
| 293 // (i.e., outside of CastEnvironment). |
| 294 |
| 295 int samples_remaining = dest->frames(); |
| 296 |
| 297 while (samples_remaining > 0) { |
| 298 // Get next audio frame ready for playout. |
| 299 if (!currently_playing_audio_frame_.get()) { |
| 300 base::AutoLock auto_lock(audio_lock_); |
| 301 |
| 302 // Prune the queue, skipping entries that are too old. |
| 303 // TODO(miu): Use |buffers_state| to account for audio buffering delays |
| 304 // upstream. |
| 305 const base::TimeTicks earliest_time_to_play = |
| 306 cast_env()->Clock()->NowTicks() - max_frame_age_; |
| 307 while (!audio_playout_queue_.empty() && |
| 308 audio_playout_queue_.front().first < earliest_time_to_play) { |
| 309 PopOneAudioFrame(true); |
| 310 } |
| 311 if (audio_playout_queue_.empty()) |
| 312 break; |
| 313 |
| 314 currently_playing_audio_frame_ = PopOneAudioFrame(false).Pass(); |
| 315 currently_playing_audio_frame_start_ = 0; |
| 316 } |
| 317 |
| 318 // Copy some or all of the samples in |currently_playing_audio_frame_| to |
| 319 // |dest|. Once all samples in |currently_playing_audio_frame_| have been |
| 320 // consumed, release it. |
| 321 const int num_samples_to_copy = |
| 322 std::min(samples_remaining, |
| 323 currently_playing_audio_frame_->frames() - |
| 324 currently_playing_audio_frame_start_); |
| 325 currently_playing_audio_frame_->CopyPartialFramesTo( |
| 326 currently_playing_audio_frame_start_, |
| 327 num_samples_to_copy, |
| 328 0, |
| 329 dest); |
| 330 samples_remaining -= num_samples_to_copy; |
| 331 currently_playing_audio_frame_start_ += num_samples_to_copy; |
| 332 if (currently_playing_audio_frame_start_ == |
| 333 currently_playing_audio_frame_->frames()) { |
| 334 currently_playing_audio_frame_.reset(); |
| 335 } |
| 336 } |
| 337 |
| 338 // If |dest| has not been fully filled, then an underrun has occurred; and |
| 339 // fill the remainder of |dest| with zeros. |
| 340 if (samples_remaining > 0) { |
| 341 // Note: Only logging underruns after the first frame has been received. |
| 342 LOG_IF(WARNING, currently_playing_audio_frame_start_ != -1) |
| 343 << "Audio: Playback underrun of " << samples_remaining << " samples!"; |
| 344 dest->ZeroFramesPartial(dest->frames() - samples_remaining, |
| 345 samples_remaining); |
| 346 } |
| 347 |
| 348 return dest->frames(); |
| 349 } |
| 350 |
| 351 virtual int OnMoreIOData(AudioBus* source, |
| 352 AudioBus* dest, |
| 353 AudioBuffersState buffers_state) OVERRIDE { |
| 354 return OnMoreData(dest, buffers_state); |
| 355 } |
| 356 |
| 357 virtual void OnError(AudioOutputStream* stream) OVERRIDE { |
| 358 LOG(ERROR) << "AudioOutputStream reports an error. " |
| 359 << "Playback is unlikely to continue."; |
| 360 } |
| 361 |
| 362 // End of AudioSourceCallback implementation. |
| 363 //////////////////////////////////////////////////////////////////// |
| 364 |
| 365 void ScheduleVideoPlayout() { |
| 366 DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN)); |
| 367 |
| 368 // Prune the queue, skipping entries that are too old. |
| 369 const base::TimeTicks now = cast_env()->Clock()->NowTicks(); |
| 370 const base::TimeTicks earliest_time_to_play = now - max_frame_age_; |
| 371 while (!video_playout_queue_.empty() && |
| 372 video_playout_queue_.front().first < earliest_time_to_play) { |
| 373 PopOneVideoFrame(true); |
| 374 } |
| 375 |
| 376 // If the queue is not empty, schedule playout of its first frame. |
| 377 if (video_playout_queue_.empty()) { |
| 378 video_playout_timer_.Stop(); |
| 379 } else { |
| 380 video_playout_timer_.Start( |
| 381 FROM_HERE, |
| 382 video_playout_queue_.front().first - now, |
| 383 base::Bind(&NaivePlayer::PlayNextVideoFrame, |
| 384 base::Unretained(this))); |
| 385 } |
| 386 } |
| 387 |
| 388 void PlayNextVideoFrame() { |
| 389 DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN)); |
| 390 if (!video_playout_queue_.empty()) { |
| 391 const scoped_refptr<VideoFrame> video_frame = PopOneVideoFrame(false); |
| 392 #ifdef OS_LINUX |
| 393 render_.RenderFrame(video_frame); |
| 394 #endif // OS_LINUX |
| 395 } |
| 396 ScheduleVideoPlayout(); |
| 397 } |
| 398 |
| 399 scoped_refptr<VideoFrame> PopOneVideoFrame(bool is_being_skipped) { |
| 400 DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN)); |
| 401 |
| 402 if (is_being_skipped) { |
| 403 VLOG(1) << "VideoFrame[" << num_video_frames_processed_ << "]: Skipped."; |
| 404 } else { |
| 405 VLOG(1) << "VideoFrame[" << num_video_frames_processed_ << "]: Playing " |
| 406 << (cast_env()->Clock()->NowTicks() - |
| 407 video_playout_queue_.front().first).InMicroseconds() |
| 408 << " usec later than intended."; |
| 409 } |
| 410 |
| 411 const scoped_refptr<VideoFrame> ret = video_playout_queue_.front().second; |
| 412 video_playout_queue_.pop_front(); |
| 413 ++num_video_frames_processed_; |
| 414 return ret; |
| 415 } |
| 416 |
| 417 scoped_ptr<AudioBus> PopOneAudioFrame(bool was_skipped) { |
| 418 audio_lock_.AssertAcquired(); |
| 419 |
| 420 if (was_skipped) { |
| 421 VLOG(1) << "AudioFrame[" << num_audio_frames_processed_ << "]: Skipped"; |
| 422 } else { |
| 423 VLOG(1) << "AudioFrame[" << num_audio_frames_processed_ << "]: Playing " |
| 424 << (cast_env()->Clock()->NowTicks() - |
| 425 audio_playout_queue_.front().first).InMicroseconds() |
| 426 << " usec later than intended."; |
| 427 } |
| 428 |
| 429 scoped_ptr<AudioBus> ret(audio_playout_queue_.front().second); |
| 430 audio_playout_queue_.pop_front(); |
| 431 ++num_audio_frames_processed_; |
| 432 return ret.Pass(); |
| 433 } |
| 434 |
| 435 // Frames in the queue older than this (relative to NowTicks()) will be |
| 436 // dropped (i.e., playback is falling behind). |
| 437 const base::TimeDelta max_frame_age_; |
| 438 |
| 439 // Outputs created, started, and destroyed by this NaivePlayer. |
192 #ifdef OS_LINUX | 440 #ifdef OS_LINUX |
193 test::LinuxOutputWindow render_; | 441 test::LinuxOutputWindow render_; |
194 #endif // OS_LINUX | 442 #endif // OS_LINUX |
195 base::TimeTicks last_playout_time_; | 443 scoped_ptr<AudioOutputStream> audio_output_stream_; |
196 base::TimeTicks last_render_time_; | 444 |
| 445 // Video playout queue. |
| 446 typedef std::pair<base::TimeTicks, scoped_refptr<VideoFrame> > |
| 447 VideoQueueEntry; |
| 448 std::deque<VideoQueueEntry> video_playout_queue_; |
| 449 int64 num_video_frames_processed_; |
| 450 |
| 451 base::OneShotTimer<NaivePlayer> video_playout_timer_; |
| 452 |
| 453 // Audio playout queue, synchronized by |audio_lock_|. |
| 454 base::Lock audio_lock_; |
| 455 typedef std::pair<base::TimeTicks, AudioBus*> AudioQueueEntry; |
| 456 std::deque<AudioQueueEntry> audio_playout_queue_; |
| 457 int64 num_audio_frames_processed_; |
| 458 |
| 459 // These must only be used on the audio thread calling OnMoreData(). |
| 460 scoped_ptr<AudioBus> currently_playing_audio_frame_; |
| 461 int currently_playing_audio_frame_start_; |
197 }; | 462 }; |
198 | 463 |
199 } // namespace cast | 464 } // namespace cast |
200 } // namespace media | 465 } // namespace media |
201 | 466 |
202 int main(int argc, char** argv) { | 467 int main(int argc, char** argv) { |
203 base::AtExitManager at_exit; | 468 base::AtExitManager at_exit; |
204 CommandLine::Init(argc, argv); | 469 CommandLine::Init(argc, argv); |
205 InitLogging(logging::LoggingSettings()); | 470 InitLogging(logging::LoggingSettings()); |
206 | 471 |
207 scoped_refptr<media::cast::CastEnvironment> cast_environment( | 472 scoped_refptr<media::cast::CastEnvironment> cast_environment( |
208 new media::cast::StandaloneCastEnvironment); | 473 new media::cast::StandaloneCastEnvironment); |
209 | 474 |
| 475 // Start up Chromium audio system. |
| 476 media::FakeAudioLogFactory fake_audio_log_factory_; |
| 477 const scoped_ptr<media::AudioManager> audio_manager( |
| 478 media::AudioManager::Create(&fake_audio_log_factory_)); |
| 479 CHECK(media::AudioManager::Get()); |
| 480 |
210 media::cast::AudioReceiverConfig audio_config = | 481 media::cast::AudioReceiverConfig audio_config = |
211 media::cast::GetAudioReceiverConfig(); | 482 media::cast::GetAudioReceiverConfig(); |
212 media::cast::VideoReceiverConfig video_config = | 483 media::cast::VideoReceiverConfig video_config = |
213 media::cast::GetVideoReceiverConfig(); | 484 media::cast::GetVideoReceiverConfig(); |
214 | 485 |
| 486 // Determine local and remote endpoints. |
215 int remote_port, local_port; | 487 int remote_port, local_port; |
216 media::cast::GetPorts(&remote_port, &local_port); | 488 media::cast::GetPorts(&remote_port, &local_port); |
217 if (!local_port) { | 489 if (!local_port) { |
218 LOG(ERROR) << "Invalid local port."; | 490 LOG(ERROR) << "Invalid local port."; |
219 return 1; | 491 return 1; |
220 } | 492 } |
221 | |
222 std::string remote_ip_address = media::cast::GetIpAddress("Enter remote IP."); | 493 std::string remote_ip_address = media::cast::GetIpAddress("Enter remote IP."); |
223 std::string local_ip_address = media::cast::GetIpAddress("Enter local IP."); | 494 std::string local_ip_address = media::cast::GetIpAddress("Enter local IP."); |
224 net::IPAddressNumber remote_ip_number; | 495 net::IPAddressNumber remote_ip_number; |
225 net::IPAddressNumber local_ip_number; | 496 net::IPAddressNumber local_ip_number; |
226 | |
227 if (!net::ParseIPLiteralToNumber(remote_ip_address, &remote_ip_number)) { | 497 if (!net::ParseIPLiteralToNumber(remote_ip_address, &remote_ip_number)) { |
228 LOG(ERROR) << "Invalid remote IP address."; | 498 LOG(ERROR) << "Invalid remote IP address."; |
229 return 1; | 499 return 1; |
230 } | 500 } |
231 | |
232 if (!net::ParseIPLiteralToNumber(local_ip_address, &local_ip_number)) { | 501 if (!net::ParseIPLiteralToNumber(local_ip_address, &local_ip_number)) { |
233 LOG(ERROR) << "Invalid local IP address."; | 502 LOG(ERROR) << "Invalid local IP address."; |
234 return 1; | 503 return 1; |
235 } | 504 } |
236 | |
237 net::IPEndPoint remote_end_point(remote_ip_number, remote_port); | 505 net::IPEndPoint remote_end_point(remote_ip_number, remote_port); |
238 net::IPEndPoint local_end_point(local_ip_number, local_port); | 506 net::IPEndPoint local_end_point(local_ip_number, local_port); |
239 | 507 |
240 int width = 0; | 508 // Create and start the player. |
241 int height = 0; | 509 int window_width = 0; |
| 510 int window_height = 0; |
242 #if defined(OS_LINUX) | 511 #if defined(OS_LINUX) |
243 media::cast::GetResolution(&height, &width); | 512 media::cast::GetWindowSize(&window_width, &window_height); |
244 #endif // OS_LINUX | 513 #endif // OS_LINUX |
245 media::cast::ReceiverDisplay* const receiver_display = | 514 media::cast::NaivePlayer player(cast_environment, |
246 new media::cast::ReceiverDisplay(cast_environment, | 515 local_end_point, |
247 local_end_point, | 516 remote_end_point, |
248 remote_end_point, | 517 audio_config, |
249 audio_config, | 518 video_config, |
250 video_config, | 519 window_width, |
251 width, | 520 window_height); |
252 height); | 521 player.Start(); |
253 receiver_display->Start(); | |
254 | 522 |
255 base::MessageLoop().Run(); // Run forever (i.e., until SIGTERM). | 523 base::MessageLoop().Run(); // Run forever (i.e., until SIGTERM). |
256 NOTREACHED(); | 524 NOTREACHED(); |
257 return 0; | 525 return 0; |
258 } | 526 } |