OLD | NEW |
(Empty) | |
| 1 /* |
| 2 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. |
| 3 * |
| 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ |
| 10 |
| 11 #include "webrtc/modules/video_coding/codecs/stereo/include/stereo_encoder_adapt
er.h" |
| 12 |
| 13 #include "webrtc/base/keep_ref_until_done.h" |
| 14 #include "webrtc/base/logging.h" |
| 15 #include "webrtc/common_video/include/video_frame.h" |
| 16 #include "webrtc/common_video/include/video_frame_buffer.h" |
| 17 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
| 18 #include "webrtc/modules/include/module_common_types.h" |
| 19 |
| 20 namespace webrtc { |
| 21 |
| 22 class StereoEncoderAdapter::AdapterEncodedImageCallback |
| 23 : public webrtc::EncodedImageCallback { |
| 24 public: |
| 25 AdapterEncodedImageCallback(webrtc::StereoEncoderAdapter* adapter, |
| 26 StereoCodecStream stream_idx) |
| 27 : adapter_(adapter), stream_idx_(stream_idx) {} |
| 28 |
| 29 EncodedImageCallback::Result OnEncodedImage( |
| 30 const EncodedImage& encoded_image, |
| 31 const CodecSpecificInfo* codec_specific_info, |
| 32 const RTPFragmentationHeader* fragmentation) override { |
| 33 if (!adapter_) |
| 34 return webrtc::EncodedImageCallback::Result::OK; |
| 35 return adapter_->OnEncodedImage(stream_idx_, encoded_image, |
| 36 codec_specific_info, fragmentation); |
| 37 } |
| 38 |
| 39 private: |
| 40 StereoEncoderAdapter* adapter_; |
| 41 const StereoCodecStream stream_idx_; |
| 42 }; |
| 43 |
| 44 struct StereoEncoderAdapter::EncodedImageData { |
| 45 explicit EncodedImageData(StereoCodecStream stream_idx) |
| 46 : stream_idx_(stream_idx) { |
| 47 RTC_DCHECK_EQ(kAXXStream, stream_idx); |
| 48 encodedImage_._length = 0; |
| 49 } |
| 50 EncodedImageData(StereoCodecStream stream_idx, |
| 51 const EncodedImage& encodedImage, |
| 52 const CodecSpecificInfo* codecSpecificInfo, |
| 53 const RTPFragmentationHeader* fragmentation) |
| 54 : stream_idx_(stream_idx), |
| 55 encodedImage_(encodedImage), |
| 56 codecSpecificInfo_(*codecSpecificInfo) { |
| 57 fragmentation_.CopyFrom(*fragmentation); |
| 58 } |
| 59 const StereoCodecStream stream_idx_; |
| 60 EncodedImage encodedImage_; |
| 61 const CodecSpecificInfo codecSpecificInfo_; |
| 62 RTPFragmentationHeader fragmentation_; |
| 63 |
| 64 private: |
| 65 RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(EncodedImageData); |
| 66 }; |
| 67 |
// |factory| is not owned and must outlive this adapter; it is used to
// create and destroy the per-stream encoders.
StereoEncoderAdapter::StereoEncoderAdapter(VideoEncoderFactory* factory)
    : factory_(factory), encoded_complete_callback_(nullptr) {}
StereoEncoderAdapter::~StereoEncoderAdapter() {
  // Releases and returns all underlying encoders to the factory.
  Release();
}
| 74 |
| 75 int StereoEncoderAdapter::InitEncode(const VideoCodec* inst, |
| 76 int number_of_cores, |
| 77 size_t max_payload_size) { |
| 78 const size_t buffer_size = |
| 79 CalcBufferSize(VideoType::kI420, inst->width, inst->height); |
| 80 stereo_dummy_planes_.resize(buffer_size); |
| 81 // It is more expensive to encode 0x00, so use 0x80 instead. |
| 82 std::fill(stereo_dummy_planes_.begin(), stereo_dummy_planes_.end(), 0x80); |
| 83 |
| 84 for (size_t i = 0; i < kStereoCodecStreams; ++i) { |
| 85 VideoEncoder* encoder = factory_->Create(); |
| 86 const int rv = encoder->InitEncode(inst, number_of_cores, max_payload_size); |
| 87 if (rv) |
| 88 return rv; |
| 89 encoders_.push_back(encoder); |
| 90 adapter_callbacks_.emplace_back(new AdapterEncodedImageCallback( |
| 91 this, static_cast<StereoCodecStream>(i))); |
| 92 encoder->RegisterEncodeCompleteCallback(adapter_callbacks_.back().get()); |
| 93 } |
| 94 return WEBRTC_VIDEO_CODEC_OK; |
| 95 } |
| 96 |
| 97 int StereoEncoderAdapter::Encode(const VideoFrame& input_image, |
| 98 const CodecSpecificInfo* codec_specific_info, |
| 99 const std::vector<FrameType>* frame_types) { |
| 100 if (!encoded_complete_callback_) { |
| 101 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 102 } |
| 103 |
| 104 // Encode AXX |
| 105 rtc::scoped_refptr<I420BufferInterface> yuva_buffer = |
| 106 input_image.video_frame_buffer()->ToI420(); |
| 107 if (yuva_buffer->HasAlpha()) { |
| 108 rtc::scoped_refptr<WrappedI420Buffer> alpha_buffer( |
| 109 new rtc::RefCountedObject<webrtc::WrappedI420Buffer>( |
| 110 input_image.width(), input_image.height(), yuva_buffer->DataA(), |
| 111 yuva_buffer->StrideA(), stereo_dummy_planes_.data(), |
| 112 yuva_buffer->StrideU(), stereo_dummy_planes_.data(), |
| 113 yuva_buffer->StrideV(), |
| 114 rtc::KeepRefUntilDone(input_image.video_frame_buffer()))); |
| 115 VideoFrame alpha_image(alpha_buffer, input_image.timestamp(), |
| 116 input_image.render_time_ms(), |
| 117 input_image.rotation()); |
| 118 encoders_[kAXXStream]->Encode(alpha_image, codec_specific_info, |
| 119 frame_types); |
| 120 } else { |
| 121 RTC_DCHECK(encoded_data_.find(input_image.timestamp()) == |
| 122 encoded_data_.end()); |
| 123 encoded_data_.emplace(std::piecewise_construct, |
| 124 std::forward_as_tuple(input_image.timestamp()), |
| 125 std::forward_as_tuple(kAXXStream)); |
| 126 } |
| 127 |
| 128 // Encode YUV |
| 129 int rv = encoders_[kYUVStream]->Encode(input_image, codec_specific_info, |
| 130 frame_types); |
| 131 // RTC_DCHECK(rv); |
| 132 return rv; |
| 133 } |
| 134 |
// Stores the sink for combined (YUV+AXX) encoded frames. |callback| is not
// owned and must outlive the adapter or be re-registered.
int StereoEncoderAdapter::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  encoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
| 140 |
| 141 int StereoEncoderAdapter::SetChannelParameters(uint32_t packet_loss, |
| 142 int64_t rtt) { |
| 143 for (auto encoder : encoders_) { |
| 144 const int rv = encoder->SetChannelParameters(packet_loss, rtt); |
| 145 if (rv) |
| 146 return rv; |
| 147 } |
| 148 return WEBRTC_VIDEO_CODEC_OK; |
| 149 } |
| 150 |
| 151 int StereoEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate, |
| 152 uint32_t new_framerate) { |
| 153 for (auto encoder : encoders_) { |
| 154 const int rv = encoder->SetRateAllocation(bitrate, new_framerate); |
| 155 if (rv) |
| 156 return rv; |
| 157 } |
| 158 return WEBRTC_VIDEO_CODEC_OK; |
| 159 } |
| 160 |
| 161 int StereoEncoderAdapter::Release() { |
| 162 for (auto encoder : encoders_) { |
| 163 const int rv = encoder->Release(); |
| 164 if (rv) |
| 165 return rv; |
| 166 factory_->Destroy(encoder); |
| 167 } |
| 168 encoders_.clear(); |
| 169 adapter_callbacks_.clear(); |
| 170 return WEBRTC_VIDEO_CODEC_OK; |
| 171 } |
| 172 |
| 173 EncodedImageCallback::Result StereoEncoderAdapter::OnEncodedImage( |
| 174 StereoCodecStream stream_idx, |
| 175 const EncodedImage& encodedImage, |
| 176 const CodecSpecificInfo* codecSpecificInfo, |
| 177 const RTPFragmentationHeader* fragmentation) { |
| 178 const auto& other_encoded_data_it = |
| 179 encoded_data_.find(encodedImage._timeStamp); |
| 180 if (other_encoded_data_it != encoded_data_.end()) { |
| 181 const auto& other_image_data = other_encoded_data_it->second; |
| 182 EncodedImageCallback::Result res = EncodedImageCallback::Result::OK; |
| 183 if (stream_idx == kYUVStream) { |
| 184 RTC_DCHECK_EQ(kAXXStream, other_image_data.stream_idx_); |
| 185 res = SendEncodedImages(encodedImage, codecSpecificInfo, fragmentation, |
| 186 other_image_data.encodedImage_, |
| 187 &other_image_data.codecSpecificInfo_, |
| 188 &other_image_data.fragmentation_); |
| 189 } else { |
| 190 RTC_DCHECK_EQ(kYUVStream, other_image_data.stream_idx_); |
| 191 RTC_DCHECK_EQ(kAXXStream, stream_idx); |
| 192 res = SendEncodedImages(other_image_data.encodedImage_, |
| 193 &other_image_data.codecSpecificInfo_, |
| 194 &other_image_data.fragmentation_, encodedImage, |
| 195 codecSpecificInfo, fragmentation); |
| 196 } |
| 197 encoded_data_.erase(encoded_data_.begin(), other_encoded_data_it); |
| 198 return res; |
| 199 } |
| 200 RTC_DCHECK(encoded_data_.find(encodedImage._timeStamp) == |
| 201 encoded_data_.end()); |
| 202 encoded_data_.emplace( |
| 203 std::piecewise_construct, std::forward_as_tuple(encodedImage._timeStamp), |
| 204 std::forward_as_tuple(stream_idx, encodedImage, codecSpecificInfo, |
| 205 fragmentation)); |
| 206 return webrtc::EncodedImageCallback::Result::OK; |
| 207 } |
| 208 |
| 209 EncodedImageCallback::Result StereoEncoderAdapter::SendEncodedImages( |
| 210 const EncodedImage& encoded_image, |
| 211 const CodecSpecificInfo* codec_specific_info, |
| 212 const RTPFragmentationHeader* fragmentation, |
| 213 const EncodedImage& stereo_encoded_image, |
| 214 const CodecSpecificInfo* stereo_codec_specific_info, |
| 215 const RTPFragmentationHeader* stereo_fragmentation) { |
| 216 const bool has_alpha = stereo_encoded_image._length != 0; |
| 217 |
| 218 CodecSpecificInfo* yuv_codec = |
| 219 const_cast<CodecSpecificInfo*>(codec_specific_info); |
| 220 yuv_codec->codecType = kVideoCodecStereo; |
| 221 yuv_codec->codec_name = "stereo-vp9"; |
| 222 yuv_codec->stereoInfo.stereoCodecType = kVideoCodecVP9; |
| 223 if (!has_alpha) { |
| 224 yuv_codec->stereoInfo.num_frames = 0; |
| 225 return encoded_complete_callback_->OnEncodedImage(encoded_image, yuv_codec, |
| 226 fragmentation); |
| 227 } |
| 228 |
| 229 yuv_codec->stereoInfo.num_frames = 1; |
| 230 yuv_codec->stereoInfo.encoded_images[0] = &encoded_image; |
| 231 yuv_codec->stereoInfo.codec_specific_infos[0] = stereo_codec_specific_info; |
| 232 yuv_codec->stereoInfo.fragmentations[0] = stereo_fragmentation; |
| 233 return encoded_complete_callback_->OnEncodedImage(encoded_image, yuv_codec, |
| 234 fragmentation); |
| 235 } |
| 236 |
| 237 } // namespace webrtc |
OLD | NEW |