| OLD | NEW |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/renderer/media/cast_rtp_stream.h" | 5 #include "chrome/renderer/media/cast_rtp_stream.h" |
| 6 | 6 |
| 7 #include <stdint.h> | 7 #include <stdint.h> |
| 8 | 8 |
| 9 #include <algorithm> | 9 #include <algorithm> |
| 10 #include <memory> | 10 #include <memory> |
| (...skipping 44 matching lines...) |
| 55 // limit (60 * 250ms = 15 seconds), refresh frame requests will stop being made. | 55 // limit (60 * 250ms = 15 seconds), refresh frame requests will stop being made. |
| 56 const int kMaxConsecutiveRefreshFrames = 60; | 56 const int kMaxConsecutiveRefreshFrames = 60; |
| 57 | 57 |
| 58 FrameSenderConfig DefaultOpusConfig() { | 58 FrameSenderConfig DefaultOpusConfig() { |
| 59 FrameSenderConfig config; | 59 FrameSenderConfig config; |
| 60 config.rtp_payload_type = media::cast::RtpPayloadType::AUDIO_OPUS; | 60 config.rtp_payload_type = media::cast::RtpPayloadType::AUDIO_OPUS; |
| 61 config.sender_ssrc = 1; | 61 config.sender_ssrc = 1; |
| 62 config.receiver_ssrc = 2; | 62 config.receiver_ssrc = 2; |
| 63 config.rtp_timebase = media::cast::kDefaultAudioSamplingRate; | 63 config.rtp_timebase = media::cast::kDefaultAudioSamplingRate; |
| 64 config.channels = 2; | 64 config.channels = 2; |
| 65 // The value is 0 which means VBR. | |
| 66 config.min_bitrate = config.max_bitrate = config.start_bitrate = | 65 config.min_bitrate = config.max_bitrate = config.start_bitrate = |
| 67 media::cast::kDefaultAudioEncoderBitrate; | 66 media::cast::kDefaultAudioEncoderBitrate; |
| 68 config.max_frame_rate = 100; // 10 ms audio frames | 67 config.max_frame_rate = 100; // 10 ms audio frames |
| 69 config.codec = media::cast::CODEC_AUDIO_OPUS; | 68 config.codec = media::cast::CODEC_AUDIO_OPUS; |
| 70 return config; | 69 return config; |
| 71 } | 70 } |
| 72 | 71 |
| 73 FrameSenderConfig DefaultVp8Config() { | 72 FrameSenderConfig DefaultVp8Config() { |
| 74 FrameSenderConfig config; | 73 FrameSenderConfig config; |
| 75 config.rtp_payload_type = media::cast::RtpPayloadType::VIDEO_VP8; | 74 config.rtp_payload_type = media::cast::RtpPayloadType::VIDEO_VP8; |
| (...skipping 15 matching lines...) |
| 91 config.receiver_ssrc = 12; | 90 config.receiver_ssrc = 12; |
| 92 config.rtp_timebase = media::cast::kVideoFrequency; | 91 config.rtp_timebase = media::cast::kVideoFrequency; |
| 93 config.channels = 1; | 92 config.channels = 1; |
| 94 config.max_bitrate = media::cast::kDefaultMaxVideoBitrate; | 93 config.max_bitrate = media::cast::kDefaultMaxVideoBitrate; |
| 95 config.min_bitrate = media::cast::kDefaultMinVideoBitrate; | 94 config.min_bitrate = media::cast::kDefaultMinVideoBitrate; |
| 96 config.max_frame_rate = media::cast::kDefaultMaxFrameRate; | 95 config.max_frame_rate = media::cast::kDefaultMaxFrameRate; |
| 97 config.codec = media::cast::CODEC_VIDEO_H264; | 96 config.codec = media::cast::CODEC_VIDEO_H264; |
| 98 return config; | 97 return config; |
| 99 } | 98 } |
| 100 | 99 |
| 101 std::vector<FrameSenderConfig> SupportedAudioConfigs() { | 100 FrameSenderConfig DefaultRemotingAudioConfig() { |
| 102 // TODO(hclam): Fill in more codecs here. | 101 FrameSenderConfig config; |
| 103 return std::vector<FrameSenderConfig>(1, DefaultOpusConfig()); | 102 config.rtp_payload_type = media::cast::RtpPayloadType::REMOTE_AUDIO; |
| | 103 config.sender_ssrc = 3; |
| | 104 config.receiver_ssrc = 4; |
| | 105 config.codec = media::cast::CODEC_AUDIO_REMOTE; |
| | 106 config.rtp_timebase = media::cast::kRemotingRtpTimebase; |
| | 107 config.max_bitrate = 1000000; |
| | 108 config.min_bitrate = 0; |
| | 109 config.channels = 2; |
| | 110 config.max_frame_rate = 100; // 10 ms audio frames |
| | 111 |
| | 112 return config; |
| 104 } | 113 } |
| 105 | 114 |
| 106 std::vector<FrameSenderConfig> SupportedVideoConfigs() { | 115 FrameSenderConfig DefaultRemotingVideoConfig() { |
| | 116 FrameSenderConfig config; |
| | 117 config.rtp_payload_type = media::cast::RtpPayloadType::REMOTE_VIDEO; |
| | 118 config.sender_ssrc = 13; |
| | 119 config.receiver_ssrc = 14; |
| | 120 config.codec = media::cast::CODEC_VIDEO_REMOTE; |
| | 121 config.rtp_timebase = media::cast::kRemotingRtpTimebase; |
| | 122 config.max_bitrate = 10000000; |
| | 123 config.min_bitrate = 0; |
| | 124 config.channels = 1; |
| | 125 config.max_frame_rate = media::cast::kDefaultMaxFrameRate; |
| | 126 return config; |
| | 127 } |
| | 128 |
| | 129 std::vector<FrameSenderConfig> SupportedAudioConfigs(bool for_remoting_stream) { |
| | 130 if (for_remoting_stream) |
| | 131 return {DefaultRemotingAudioConfig()}; |
| | 132 else |
| | 133 return {DefaultOpusConfig()}; |
| | 134 } |
| | 135 |
| | 136 std::vector<FrameSenderConfig> SupportedVideoConfigs(bool for_remoting_stream) { |
| | 137 if (for_remoting_stream) |
| | 138 return {DefaultRemotingVideoConfig()}; |
| | 139 |
| 107 std::vector<FrameSenderConfig> supported_configs; | 140 std::vector<FrameSenderConfig> supported_configs; |
| 108 | |
| 109 // Prefer VP8 over H.264 for hardware encoder. | 141 // Prefer VP8 over H.264 for hardware encoder. |
| 110 if (CastRtpStream::IsHardwareVP8EncodingSupported()) | 142 if (CastRtpStream::IsHardwareVP8EncodingSupported()) |
| 111 supported_configs.push_back(DefaultVp8Config()); | 143 supported_configs.push_back(DefaultVp8Config()); |
| 112 if (CastRtpStream::IsHardwareH264EncodingSupported()) | 144 if (CastRtpStream::IsHardwareH264EncodingSupported()) |
| 113 supported_configs.push_back(DefaultH264Config()); | 145 supported_configs.push_back(DefaultH264Config()); |
| 114 | 146 |
| 115 // Propose the default software VP8 encoder, if no hardware encoders are | 147 // Propose the default software VP8 encoder, if no hardware encoders are |
| 116 // available. | 148 // available. |
| 117 if (supported_configs.empty()) | 149 if (supported_configs.empty()) |
| 118 supported_configs.push_back(DefaultVp8Config()); | 150 supported_configs.push_back(DefaultVp8Config()); |
| (...skipping 323 matching lines...) |
| 442 vea_profile.profile <= media::H264PROFILE_MAX) { | 474 vea_profile.profile <= media::H264PROFILE_MAX) { |
| 443 return true; | 475 return true; |
| 444 } | 476 } |
| 445 } | 477 } |
| 446 #endif // !defined(OS_MACOSX) && !defined(OS_WIN) | 478 #endif // !defined(OS_MACOSX) && !defined(OS_WIN) |
| 447 return false; | 479 return false; |
| 448 } | 480 } |
| 449 | 481 |
| 450 CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track, | 482 CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track, |
| 451 const scoped_refptr<CastSession>& session) | 483 const scoped_refptr<CastSession>& session) |
| 452 : track_(track), cast_session_(session), weak_factory_(this) {} | 484 : track_(track), |
| | 485 cast_session_(session), |
| | 486 is_audio_(track_.source().getType() == |
| | 487 blink::WebMediaStreamSource::TypeAudio), |
| | 488 weak_factory_(this) {} |
| | 489 |
| | 490 CastRtpStream::CastRtpStream(bool is_audio, |
| | 491 const scoped_refptr<CastSession>& session) |
| | 492 : cast_session_(session), is_audio_(is_audio), weak_factory_(this) {} |
| 453 | 493 |
| 454 CastRtpStream::~CastRtpStream() { | 494 CastRtpStream::~CastRtpStream() { |
| 455 Stop(); | 495 Stop(); |
| 456 } | 496 } |
| 457 | 497 |
| 458 std::vector<FrameSenderConfig> CastRtpStream::GetSupportedConfigs() { | 498 std::vector<FrameSenderConfig> CastRtpStream::GetSupportedConfigs() { |
| 459 if (IsAudio()) | 499 if (is_audio_) |
| 460 return SupportedAudioConfigs(); | 500 return SupportedAudioConfigs(track_.isNull()); |
| 461 else | 501 else |
| 462 return SupportedVideoConfigs(); | 502 return SupportedVideoConfigs(track_.isNull()); |
| 463 } | 503 } |
| 464 | 504 |
| 465 void CastRtpStream::Start(const FrameSenderConfig& config, | 505 void CastRtpStream::Start(int32_t stream_id, |
| | 506 const FrameSenderConfig& config, |
| 466 const base::Closure& start_callback, | 507 const base::Closure& start_callback, |
| 467 const base::Closure& stop_callback, | 508 const base::Closure& stop_callback, |
| 468 const ErrorCallback& error_callback) { | 509 const ErrorCallback& error_callback) { |
| 469 DCHECK(!start_callback.is_null()); | 510 DCHECK(!start_callback.is_null()); |
| 470 DCHECK(!stop_callback.is_null()); | 511 DCHECK(!stop_callback.is_null()); |
| 471 DCHECK(!error_callback.is_null()); | 512 DCHECK(!error_callback.is_null()); |
| 472 | 513 |
| 473 DVLOG(1) << "CastRtpStream::Start = " << (IsAudio() ? "audio" : "video"); | 514 DVLOG(1) << "CastRtpStream::Start = " << (is_audio_ ? "audio" : "video"); |
| 474 stop_callback_ = stop_callback; | 515 stop_callback_ = stop_callback; |
| 475 error_callback_ = error_callback; | 516 error_callback_ = error_callback; |
| 476 | 517 |
| 477 if (IsAudio()) { | 518 if (track_.isNull()) { |
| | 519 cast_session_->StartRemotingStream( |
| | 520 stream_id, config, base::Bind(&CastRtpStream::DidEncounterError, |
| | 521 weak_factory_.GetWeakPtr())); |
| 522 } else if (is_audio_) { |
| 478 // In case of error we have to go through DidEncounterError() to stop | 523 // In case of error we have to go through DidEncounterError() to stop |
| 479 // the streaming after reporting the error. | 524 // the streaming after reporting the error. |
| 480 audio_sink_.reset( | 525 audio_sink_.reset( |
| 481 new CastAudioSink(track_, config.channels, config.rtp_timebase)); | 526 new CastAudioSink(track_, config.channels, config.rtp_timebase)); |
| 482 cast_session_->StartAudio( | 527 cast_session_->StartAudio( |
| 483 config, | 528 config, |
| 484 base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()), | 529 base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()), |
| 485 base::Bind(&CastRtpStream::DidEncounterError, | 530 base::Bind(&CastRtpStream::DidEncounterError, |
| 486 weak_factory_.GetWeakPtr())); | 531 weak_factory_.GetWeakPtr())); |
| 487 start_callback.Run(); | |
| 488 } else { | 532 } else { |
| 489 // See the code for audio above for explanation of callbacks. | 533 // See the code for audio above for explanation of callbacks. |
| 490 video_sink_.reset(new CastVideoSink( | 534 video_sink_.reset(new CastVideoSink( |
| 491 track_, | 535 track_, |
| 492 media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError, | 536 media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError, |
| 493 weak_factory_.GetWeakPtr())))); | 537 weak_factory_.GetWeakPtr())))); |
| 494 cast_session_->StartVideo( | 538 cast_session_->StartVideo( |
| 495 config, base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr(), | 539 config, base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr(), |
| 496 !config.aes_key.empty()), | 540 !config.aes_key.empty()), |
| 497 base::Bind(&CastRtpStream::DidEncounterError, | 541 base::Bind(&CastRtpStream::DidEncounterError, |
| 498 weak_factory_.GetWeakPtr())); | 542 weak_factory_.GetWeakPtr())); |
| 499 start_callback.Run(); | |
| 500 } | 543 } |
| | 544 start_callback.Run(); |
| 501 } | 545 } |
| 502 | 546 |
| 503 void CastRtpStream::Stop() { | 547 void CastRtpStream::Stop() { |
| 504 DVLOG(1) << "CastRtpStream::Stop = " << (IsAudio() ? "audio" : "video"); | 548 DVLOG(1) << "CastRtpStream::Stop = " << (is_audio_ ? "audio" : "video"); |
| 505 if (stop_callback_.is_null()) | 549 if (stop_callback_.is_null()) |
| 506 return; // Already stopped. | 550 return; // Already stopped. |
| 507 weak_factory_.InvalidateWeakPtrs(); | 551 weak_factory_.InvalidateWeakPtrs(); |
| 508 error_callback_.Reset(); | 552 error_callback_.Reset(); |
| 509 audio_sink_.reset(); | 553 audio_sink_.reset(); |
| 510 video_sink_.reset(); | 554 video_sink_.reset(); |
| 511 base::ResetAndReturn(&stop_callback_).Run(); | 555 base::ResetAndReturn(&stop_callback_).Run(); |
| 512 } | 556 } |
| 513 | 557 |
| 514 void CastRtpStream::ToggleLogging(bool enable) { | 558 void CastRtpStream::ToggleLogging(bool enable) { |
| 515 DVLOG(1) << "CastRtpStream::ToggleLogging(" << enable << ") = " | 559 DVLOG(1) << "CastRtpStream::ToggleLogging(" << enable |
| 516 << (IsAudio() ? "audio" : "video"); | 560 << ") = " << (is_audio_ ? "audio" : "video"); |
| 517 cast_session_->ToggleLogging(IsAudio(), enable); | 561 cast_session_->ToggleLogging(is_audio_, enable); |
| 518 } | 562 } |
| 519 | 563 |
| 520 void CastRtpStream::GetRawEvents( | 564 void CastRtpStream::GetRawEvents( |
| 521 const base::Callback<void(std::unique_ptr<base::BinaryValue>)>& callback, | 565 const base::Callback<void(std::unique_ptr<base::BinaryValue>)>& callback, |
| 522 const std::string& extra_data) { | 566 const std::string& extra_data) { |
| 523 DVLOG(1) << "CastRtpStream::GetRawEvents = " | 567 DVLOG(1) << "CastRtpStream::GetRawEvents = " |
| 524 << (IsAudio() ? "audio" : "video"); | 568 << (is_audio_ ? "audio" : "video"); |
| 525 cast_session_->GetEventLogsAndReset(IsAudio(), extra_data, callback); | 569 cast_session_->GetEventLogsAndReset(is_audio_, extra_data, callback); |
| 526 } | 570 } |
| 527 | 571 |
| 528 void CastRtpStream::GetStats( | 572 void CastRtpStream::GetStats( |
| 529 const base::Callback<void(std::unique_ptr<base::DictionaryValue>)>& | 573 const base::Callback<void(std::unique_ptr<base::DictionaryValue>)>& |
| 530 callback) { | 574 callback) { |
| 531 DVLOG(1) << "CastRtpStream::GetStats = " | 575 DVLOG(1) << "CastRtpStream::GetStats = " << (is_audio_ ? "audio" : "video"); |
| 532 << (IsAudio() ? "audio" : "video"); | 576 cast_session_->GetStatsAndReset(is_audio_, callback); |
| 533 cast_session_->GetStatsAndReset(IsAudio(), callback); | |
| 534 } | |
| 535 | |
| 536 bool CastRtpStream::IsAudio() const { | |
| 537 return track_.source().getType() == blink::WebMediaStreamSource::TypeAudio; | |
| 538 } | 577 } |
| 539 | 578 |
| 540 void CastRtpStream::DidEncounterError(const std::string& message) { | 579 void CastRtpStream::DidEncounterError(const std::string& message) { |
| 541 DCHECK(content::RenderThread::Get()); | 580 DCHECK(content::RenderThread::Get()); |
| 542 DVLOG(1) << "CastRtpStream::DidEncounterError(" << message << ") = " | 581 DVLOG(1) << "CastRtpStream::DidEncounterError(" << message |
| 543 << (IsAudio() ? "audio" : "video"); | 582 << ") = " << (is_audio_ ? "audio" : "video"); |
| 544 // Save the WeakPtr first because the error callback might delete this object. | 583 // Save the WeakPtr first because the error callback might delete this object. |
| 545 base::WeakPtr<CastRtpStream> ptr = weak_factory_.GetWeakPtr(); | 584 base::WeakPtr<CastRtpStream> ptr = weak_factory_.GetWeakPtr(); |
| 546 error_callback_.Run(message); | 585 error_callback_.Run(message); |
| 547 base::ThreadTaskRunnerHandle::Get()->PostTask( | 586 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 548 FROM_HERE, | 587 FROM_HERE, |
| 549 base::Bind(&CastRtpStream::Stop, ptr)); | 588 base::Bind(&CastRtpStream::Stop, ptr)); |
| 550 } | 589 } |
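For reviewers skimming the new remoting path, here is a minimal caller sketch tying the pieces above together: the track-less constructor, the null-track branch in GetSupportedConfigs(), and the stream_id-taking Start(). This is not code from the CL; the StartAudioRemoting() wrapper and the On* callbacks are hypothetical placeholders, and only the CastRtpStream signatures and the include paths are taken from (or assumed consistent with) the diff above.

```cpp
// Hypothetical usage sketch, not part of this CL. Assumes a live CastSession
// and a remoting stream id handed down from the browser process.
#include <stdint.h>

#include <memory>
#include <string>
#include <vector>

#include "base/bind.h"
#include "base/logging.h"
#include "chrome/renderer/media/cast_rtp_stream.h"
#include "chrome/renderer/media/cast_session.h"

// Placeholder callbacks for illustration only.
void OnStreamStarted() {}
void OnStreamStopped() {}
void OnStreamError(const std::string& message) {
  DLOG(ERROR) << "Remoting stream error: " << message;
}

void StartAudioRemoting(const scoped_refptr<CastSession>& session,
                        int32_t stream_id) {
  // The track-less constructor leaves |track_| null, which is what marks the
  // stream as a remoting stream rather than a mirroring stream.
  std::unique_ptr<CastRtpStream> stream(
      new CastRtpStream(true /* is_audio */, session));

  // With a null track, GetSupportedConfigs() returns the REMOTE_AUDIO config
  // instead of the default Opus mirroring config.
  std::vector<FrameSenderConfig> configs = stream->GetSupportedConfigs();
  DCHECK(!configs.empty());

  // Start() now takes the stream id; the null-track branch routes to
  // CastSession::StartRemotingStream() instead of StartAudio()/StartVideo().
  stream->Start(stream_id, configs.front(), base::Bind(&OnStreamStarted),
                base::Bind(&OnStreamStopped), base::Bind(&OnStreamError));
}
```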