| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 67 matching lines...) |
| 78 inst->numberOfSimulcastStreams > 1) { | 78 inst->numberOfSimulcastStreams > 1) { |
| 79 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 79 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 80 } | 80 } |
| 81 if (inst->codecSpecific.VP8.automaticResizeOn && | 81 if (inst->codecSpecific.VP8.automaticResizeOn && |
| 82 inst->numberOfSimulcastStreams > 1) { | 82 inst->numberOfSimulcastStreams > 1) { |
| 83 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 83 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 84 } | 84 } |
| 85 return WEBRTC_VIDEO_CODEC_OK; | 85 return WEBRTC_VIDEO_CODEC_OK; |
| 86 } | 86 } |
| 87 | 87 |
| 88 struct ScreenshareTemporalLayersFactory : webrtc::TemporalLayersFactory { | |
| 89 ScreenshareTemporalLayersFactory() {} | |
| 90 virtual ~ScreenshareTemporalLayersFactory() {} | |
| 91 | |
| 92 virtual webrtc::TemporalLayers* Create(int num_temporal_layers, | |
| 93 uint8_t initial_tl0_pic_idx) const { | |
| 94 return new webrtc::ScreenshareLayers(num_temporal_layers, rand(), | |
| 95 webrtc::Clock::GetRealTimeClock()); | |
| 96 } | |
| 97 }; | |
| 98 | |
| 99 // An EncodedImageCallback implementation that forwards on calls to a | 88 // An EncodedImageCallback implementation that forwards on calls to a |
| 100 // SimulcastEncoderAdapter, but with the stream index it's registered with as | 89 // SimulcastEncoderAdapter, but with the stream index it's registered with as |
| 101 // the first parameter to Encoded. | 90 // the first parameter to Encoded. |
| 102 class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback { | 91 class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback { |
| 103 public: | 92 public: |
| 104 AdapterEncodedImageCallback(webrtc::SimulcastEncoderAdapter* adapter, | 93 AdapterEncodedImageCallback(webrtc::SimulcastEncoderAdapter* adapter, |
| 105 size_t stream_idx) | 94 size_t stream_idx) |
| 106 : adapter_(adapter), stream_idx_(stream_idx) {} | 95 : adapter_(adapter), stream_idx_(stream_idx) {} |
| 107 | 96 |
| 108 EncodedImageCallback::Result OnEncodedImage( | 97 EncodedImageCallback::Result OnEncodedImage( |
| (...skipping 11 matching lines...) |
| 120 | 109 |
| 121 } // namespace | 110 } // namespace |
| 122 | 111 |
| 123 namespace webrtc { | 112 namespace webrtc { |
| 124 | 113 |
| 125 SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory) | 114 SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory) |
| 126 : factory_(factory), | 115 : factory_(factory), |
| 127 encoded_complete_callback_(nullptr), | 116 encoded_complete_callback_(nullptr), |
| 128 implementation_name_("SimulcastEncoderAdapter") { | 117 implementation_name_("SimulcastEncoderAdapter") { |
| 129 memset(&codec_, 0, sizeof(webrtc::VideoCodec)); | 118 memset(&codec_, 0, sizeof(webrtc::VideoCodec)); |
| 130 rate_allocator_.reset(new SimulcastRateAllocator(codec_)); | 119 fallback_rate_allocator_.reset(new SimulcastRateAllocator(codec_)); |
| 131 } | 120 } |
| 132 | 121 |
| 133 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { | 122 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { |
| 134 Release(); | 123 Release(); |
| 135 } | 124 } |
| 136 | 125 |
| 137 int SimulcastEncoderAdapter::Release() { | 126 int SimulcastEncoderAdapter::Release() { |
| 138 // TODO(pbos): Keep the last encoder instance but call ::Release() on it, then | 127 // TODO(pbos): Keep the last encoder instance but call ::Release() on it, then |
| 139 // re-use this instance in ::InitEncode(). This means that changing | 128 // re-use this instance in ::InitEncode(). This means that changing |
| 140 // resolutions doesn't require reallocation of the first encoder, but only | 129 // resolutions doesn't require reallocation of the first encoder, but only |
| (...skipping 27 matching lines...) |
| 168 } | 157 } |
| 169 | 158 |
| 170 int number_of_streams = NumberOfStreams(*inst); | 159 int number_of_streams = NumberOfStreams(*inst); |
| 171 const bool doing_simulcast = (number_of_streams > 1); | 160 const bool doing_simulcast = (number_of_streams > 1); |
| 172 | 161 |
| 173 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { | 162 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { |
| 174 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 163 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 175 } | 164 } |
| 176 | 165 |
| 177 codec_ = *inst; | 166 codec_ = *inst; |
| 178 rate_allocator_.reset(new SimulcastRateAllocator(codec_)); | 167 fallback_rate_allocator_.reset(new SimulcastRateAllocator(codec_)); |
| 179 std::vector<uint32_t> start_bitrates = | 168 std::vector<uint32_t> start_bitrates; |
| 180 rate_allocator_->GetAllocation(codec_.startBitrate); | 169 BitrateAllocation allocation = fallback_rate_allocator_->GetAllocation( |
| 181 | 170 codec_.startBitrate * 1000, codec_.maxFramerate); |
| 182 // Special mode when screensharing on a single stream. | 171 for (int i = 0; i < kMaxSimulcastStreams; ++i) { |
| 183 if (number_of_streams == 1 && inst->mode == kScreensharing) { | 172 uint32_t stream_bitrate = allocation.get_spatial_layer_sum(i) / 1000; |
| 184 screensharing_tl_factory_.reset(new ScreenshareTemporalLayersFactory()); | 173 start_bitrates.push_back(stream_bitrate); |
| 185 codec_.codecSpecific.VP8.tl_factory = screensharing_tl_factory_.get(); | |
| 186 } | 174 } |
| 187 | 175 |
| 188 std::string implementation_name; | 176 std::string implementation_name; |
| 189 // Create |number_of_streams| of encoder instances and init them. | 177 // Create |number_of_streams| of encoder instances and init them. |
| 190 for (int i = 0; i < number_of_streams; ++i) { | 178 for (int i = 0; i < number_of_streams; ++i) { |
| 191 VideoCodec stream_codec; | 179 VideoCodec stream_codec; |
| 192 uint32_t start_bitrate_kbps = start_bitrates[i]; | 180 uint32_t start_bitrate_kbps = start_bitrates[i]; |
| 193 if (!doing_simulcast) { | 181 if (!doing_simulcast) { |
| 194 stream_codec = codec_; | 182 stream_codec = codec_; |
| 195 stream_codec.numberOfSimulcastStreams = 1; | 183 stream_codec.numberOfSimulcastStreams = 1; |
| (...skipping 141 matching lines...) |
| 337 int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss, | 325 int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss, |
| 338 int64_t rtt) { | 326 int64_t rtt) { |
| 339 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 327 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
| 340 streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt); | 328 streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt); |
| 341 } | 329 } |
| 342 return WEBRTC_VIDEO_CODEC_OK; | 330 return WEBRTC_VIDEO_CODEC_OK; |
| 343 } | 331 } |
| 344 | 332 |
| 345 int SimulcastEncoderAdapter::SetRates(uint32_t new_bitrate_kbit, | 333 int SimulcastEncoderAdapter::SetRates(uint32_t new_bitrate_kbit, |
| 346 uint32_t new_framerate) { | 334 uint32_t new_framerate) { |
| 347 if (!Initialized()) { | 335 BitrateAllocation allocation; |
| 348 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 336 if (new_bitrate_kbit != 0) { |
| 349 } | 337 allocation = fallback_rate_allocator_->GetAllocation( |
| 350 if (new_framerate < 1) { | 338 new_bitrate_kbit * 1000, new_framerate); |
| 351 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | |
| 352 } | |
| 353 if (codec_.maxBitrate > 0 && new_bitrate_kbit > codec_.maxBitrate) { | |
| 354 new_bitrate_kbit = codec_.maxBitrate; | |
| 355 } | 339 } |
| 356 | 340 |
| 357 std::vector<uint32_t> stream_bitrates; | 341 return SetRateAllocation(allocation, new_framerate); |
| 358 if (new_bitrate_kbit > 0) { | 342 } |
| 343 |
| 344 int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate, |
| 345 uint32_t new_framerate) { |
| 346 if (!Initialized()) |
| 347 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 348 |
| 349 if (new_framerate < 1) |
| 350 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 351 |
| 352 if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) |
| 353 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 354 |
| 355 if (bitrate.get_sum_bps() > 0) { |
| 359 // Make sure the bitrate fits the configured min bitrates. 0 is a special | 356 // Make sure the bitrate fits the configured min bitrates. 0 is a special |
| 360 // value that means paused, though, so leave it alone. | 357 // value that means paused, though, so leave it alone. |
| 361 if (new_bitrate_kbit < codec_.minBitrate) { | 358 if (bitrate.get_sum_kbps() < codec_.minBitrate) |
| 362 new_bitrate_kbit = codec_.minBitrate; | 359 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 360 |
| 361 if (codec_.numberOfSimulcastStreams > 0 && |
| 362 bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) { |
| 363 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 363 } | 364 } |
| 364 if (codec_.numberOfSimulcastStreams > 0 && | |
| 365 new_bitrate_kbit < codec_.simulcastStream[0].minBitrate) { | |
| 366 new_bitrate_kbit = codec_.simulcastStream[0].minBitrate; | |
| 367 } | |
| 368 stream_bitrates = rate_allocator_->GetAllocation(new_bitrate_kbit); | |
| 369 } | 365 } |
| 366 |
| 370 codec_.maxFramerate = new_framerate; | 367 codec_.maxFramerate = new_framerate; |
| 371 | 368 |
| 372 // Disable any stream not in the current allocation. | 369 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
| 373 stream_bitrates.resize(streaminfos_.size(), 0U); | 370 uint32_t stream_bitrate_kbps = |
| 371 bitrate.get_spatial_layer_sum(stream_idx) / 1000; |
| 374 | 372 |
| 375 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | |
| 376 uint32_t stream_bitrate_kbps = stream_bitrates[stream_idx]; | |
| 377 // Need a key frame if we have not sent this stream before. | 373 // Need a key frame if we have not sent this stream before. |
| 378 if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { | 374 if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { |
| 379 streaminfos_[stream_idx].key_frame_request = true; | 375 streaminfos_[stream_idx].key_frame_request = true; |
| 380 } | 376 } |
| 381 streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; | 377 streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; |
| 382 | 378 |
| 383 // TODO(holmer): This is a temporary hack for screensharing, where we | 379 // TODO(holmer): This is a temporary hack for screensharing, where we |
| 384 // interpret the startBitrate as the encoder target bitrate. This is | 380 // interpret the startBitrate as the encoder target bitrate. This is |
| 385 // to allow for a different max bitrate, so if the codec can't meet | 381 // to allow for a different max bitrate, so if the codec can't meet |
| 386 // the target we still allow it to overshoot up to the max before dropping | 382 // the target we still allow it to overshoot up to the max before dropping |
| 387 // frames. This hack should be improved. | 383 // frames. This hack should be improved. |
| 388 if (codec_.targetBitrate > 0 && | 384 if (codec_.targetBitrate > 0 && |
| 389 (codec_.codecSpecific.VP8.numberOfTemporalLayers == 2 || | 385 (codec_.codecSpecific.VP8.numberOfTemporalLayers == 2 || |
| 390 codec_.simulcastStream[0].numberOfTemporalLayers == 2)) { | 386 codec_.simulcastStream[0].numberOfTemporalLayers == 2)) { |
| 391 stream_bitrate_kbps = std::min(codec_.maxBitrate, stream_bitrate_kbps); | 387 stream_bitrate_kbps = std::min(codec_.maxBitrate, stream_bitrate_kbps); |
| 388 streaminfos_[stream_idx].encoder->SetRates(stream_bitrate_kbps, |
| 389 new_framerate); |
| 392 // TODO(ronghuawu): Can't change max bitrate via the VideoEncoder | 390 // TODO(ronghuawu): Can't change max bitrate via the VideoEncoder |
| 393 // interface. And VP8EncoderImpl doesn't take negative framerate. | 391 // interface. And VP8EncoderImpl doesn't take negative framerate. |
| 394 // max_bitrate = std::min(codec_.maxBitrate, stream_bitrate_kbps); | 392 // max_bitrate = std::min(codec_.maxBitrate, stream_bitrate_kbps); |
| 395 // new_framerate = -1; | 393 // new_framerate = -1; |
| 394 } else { |
| 395 // Slice the temporal layers out of the full allocation and pass it on to |
| 396 // the encoder handling the current simulcast stream. |
| 397 BitrateAllocation stream_allocation; |
| 398 for (int i = 0; i < kMaxTemporalStreams; ++i) |
| 399 stream_allocation.set_bitrate(0, i, bitrate.get_bitrate(stream_idx, i)); |
| 400 streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation, |
| 401 new_framerate); |
| 396 } | 402 } |
| 397 | |
| 398 streaminfos_[stream_idx].encoder->SetRates(stream_bitrate_kbps, | |
| 399 new_framerate); | |
| 400 } | 403 } |
| 401 | 404 |
| 402 return WEBRTC_VIDEO_CODEC_OK; | 405 return WEBRTC_VIDEO_CODEC_OK; |
| 403 } | 406 } |
| 404 | 407 |
| 405 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( | 408 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( |
| 406 size_t stream_idx, | 409 size_t stream_idx, |
| 407 const EncodedImage& encodedImage, | 410 const EncodedImage& encodedImage, |
| 408 const CodecSpecificInfo* codecSpecificInfo, | 411 const CodecSpecificInfo* codecSpecificInfo, |
| 409 const RTPFragmentationHeader* fragmentation) { | 412 const RTPFragmentationHeader* fragmentation) { |
| (...skipping 59 matching lines...) |
| 469 return false; | 472 return false; |
| 470 } | 473 } |
| 471 return true; | 474 return true; |
| 472 } | 475 } |
| 473 | 476 |
| 474 const char* SimulcastEncoderAdapter::ImplementationName() const { | 477 const char* SimulcastEncoderAdapter::ImplementationName() const { |
| 475 return implementation_name_.c_str(); | 478 return implementation_name_.c_str(); |
| 476 } | 479 } |
| 477 | 480 |
| 478 } // namespace webrtc | 481 } // namespace webrtc |
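For reference (not part of this CL): a minimal, self-contained sketch of the per-stream slicing that the new SetRateAllocation() path performs. The temporal-layer bitrates of stream `stream_idx` are copied into spatial layer 0 of a fresh allocation and handed to that stream's encoder, and a stream is treated as active only when its spatial-layer sum is non-zero. `SimpleAllocation` below is a hypothetical stand-in for `webrtc::BitrateAllocation`; it mimics only the accessors visible in this diff (`set_bitrate`, `get_bitrate`, `get_spatial_layer_sum`).

```cpp
// Illustration only; SimpleAllocation is a hypothetical stand-in for
// webrtc::BitrateAllocation, mirroring just the calls used in the diff.
#include <cstdint>
#include <cstdio>

namespace {

constexpr int kMaxSpatial = 5;   // mirrors kMaxSimulcastStreams in the diff
constexpr int kMaxTemporal = 4;  // mirrors kMaxTemporalStreams in the diff

struct SimpleAllocation {
  uint32_t bps[kMaxSpatial][kMaxTemporal] = {};

  void set_bitrate(int spatial, int temporal, uint32_t bitrate_bps) {
    bps[spatial][temporal] = bitrate_bps;
  }
  uint32_t get_bitrate(int spatial, int temporal) const {
    return bps[spatial][temporal];
  }
  uint32_t get_spatial_layer_sum(int spatial) const {
    uint32_t sum = 0;
    for (int t = 0; t < kMaxTemporal; ++t)
      sum += bps[spatial][t];
    return sum;
  }
};

// Slice the temporal layers of one simulcast stream out of the full
// allocation, the same way the adapter builds |stream_allocation| before
// calling the per-stream encoder's SetRateAllocation().
SimpleAllocation SliceForStream(const SimpleAllocation& full, int stream_idx) {
  SimpleAllocation stream_allocation;
  for (int t = 0; t < kMaxTemporal; ++t)
    stream_allocation.set_bitrate(0, t, full.get_bitrate(stream_idx, t));
  return stream_allocation;
}

}  // namespace

int main() {
  // Example: three simulcast streams, two temporal layers each.
  SimpleAllocation full;
  full.set_bitrate(0, 0, 150000);
  full.set_bitrate(0, 1, 50000);
  full.set_bitrate(1, 0, 450000);
  full.set_bitrate(1, 1, 150000);
  full.set_bitrate(2, 0, 900000);
  full.set_bitrate(2, 1, 300000);

  for (int s = 0; s < 3; ++s) {
    SimpleAllocation stream = SliceForStream(full, s);
    // A stream is enabled (send_stream == true) only if its layer sum > 0;
    // the kbps value matches get_spatial_layer_sum(s) / 1000 in the diff.
    printf("stream %d: %u kbps (TL0 %u bps, TL1 %u bps)\n", s,
           static_cast<unsigned>(full.get_spatial_layer_sum(s) / 1000),
           static_cast<unsigned>(stream.get_bitrate(0, 0)),
           static_cast<unsigned>(stream.get_bitrate(0, 1)));
  }
  return 0;
}
```

As far as one can tell from the diff, the point of passing a per-stream BitrateAllocation instead of a single kbps value is that the temporal-layer split computed by the top-level rate allocator stays authoritative, rather than being re-derived independently inside each stream's encoder.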