Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/video_track_recorder.h" | 5 #include "content/renderer/media/video_track_recorder.h" |
| 6 | 6 |
| 7 #include <utility> | 7 #include <utility> |
| 8 | 8 |
| 9 #include "base/bind.h" | 9 #include "base/bind.h" |
| 10 #include "base/logging.h" | 10 #include "base/logging.h" |
| 11 #include "base/macros.h" | 11 #include "base/macros.h" |
| 12 #include "base/sys_info.h" | 12 #include "base/sys_info.h" |
| 13 #include "base/threading/thread.h" | 13 #include "base/threading/thread.h" |
| 14 #include "base/time/time.h" | 14 #include "base/time/time.h" |
| 15 #include "base/trace_event/trace_event.h" | 15 #include "base/trace_event/trace_event.h" |
| 16 #include "media/base/video_frame.h" | 16 #include "media/base/video_frame.h" |
| 17 #include "media/base/video_util.h" | 17 #include "media/base/video_util.h" |
| 18 #include "ui/gfx/geometry/size.h" | 18 #include "ui/gfx/geometry/size.h" |
| 19 | 19 |
| 20 #if BUILDFLAG(RTC_USE_H264) | |
| 21 #include "third_party/openh264/src/codec/api/svc/codec_api.h" | |
| 22 #include "third_party/openh264/src/codec/api/svc/codec_app_def.h" | |
| 23 #include "third_party/openh264/src/codec/api/svc/codec_def.h" | |
| 24 #endif // #if BUILDFLAG(RTC_USE_H264) | |
| 25 | |
| 20 extern "C" { | 26 extern "C" { |
| 21 // VPX_CODEC_DISABLE_COMPAT excludes parts of the libvpx API that provide | 27 // VPX_CODEC_DISABLE_COMPAT excludes parts of the libvpx API that provide |
| 22 // backwards compatibility for legacy applications using the library. | 28 // backwards compatibility for legacy applications using the library. |
| 23 #define VPX_CODEC_DISABLE_COMPAT 1 | 29 #define VPX_CODEC_DISABLE_COMPAT 1 |
| 24 #include "third_party/libvpx/source/libvpx/vpx/vp8cx.h" | 30 #include "third_party/libvpx/source/libvpx/vpx/vp8cx.h" |
| 25 #include "third_party/libvpx/source/libvpx/vpx/vpx_encoder.h" | 31 #include "third_party/libvpx/source/libvpx/vpx/vpx_encoder.h" |
| 26 } | 32 } |
| 27 | 33 |
| 28 using media::VideoFrame; | 34 using media::VideoFrame; |
| 29 using media::VideoFrameMetadata; | 35 using media::VideoFrameMetadata; |
| 30 | 36 |
| 31 namespace content { | 37 namespace content { |
| 32 | 38 |
| 33 // Base class to describe a generic Encoder. This class is used to encapsulate | 39 // Base class to describe a generic Encoder, encapsulating all actual encoder |
| 34 // interactions with actual encoders, encoding and delivery of received frames. | 40 // (re)configurations, encoding and delivery of received frames. This class is |
| 35 // This class is ref-counted to allow the MediaStreamVideoTrack to hold a | 41 // ref-counted to allow the MediaStreamVideoTrack to hold a reference to it (via |
| 36 // reference to it, via the callback that MediaStreamVideoSink passes along. | 42 // the callback that MediaStreamVideoSink passes along) and to jump back and |
| 37 // Also, it is quite common that encoders run in a background thread. | 43 // forth to an internal encoder thread. Moreover, this class: |
| 44 // - is created and destroyed on its parent's thread (usually the main Render | |
| 45 // thread), |main_task_runner_|. | |
| 46 // - receives VideoFrames on |origin_task_runner_| and runs OnEncodedVideoCB on | |
| 47 // that thread as well. This task runner is cached on first frame arrival, and | |
| 48 // is supposed to be the render IO thread (but this is not enforced); | |
| 49 // - uses an internal |encoding_thread_| for actual encoder interactions, namely | |
| 50 // configuration, encoding (which might take some time) and destruction. | |
| 38 class VideoTrackRecorder::Encoder : public base::RefCountedThreadSafe<Encoder> { | 51 class VideoTrackRecorder::Encoder : public base::RefCountedThreadSafe<Encoder> { |
| 39 public: | 52 public: |
| 40 Encoder(const OnEncodedVideoCB& on_encoded_video_callback, | 53 Encoder(const OnEncodedVideoCB& on_encoded_video_callback, |
| 41 int32_t bits_per_second) | 54 int32_t bits_per_second) |
| 42 : paused_(false), | 55 : main_task_runner_(base::MessageLoop::current()->task_runner()), |
| 56 encoding_thread_(new base::Thread("EncodingThread")), | |
| 57 paused_(false), | |
| 43 on_encoded_video_callback_(on_encoded_video_callback), | 58 on_encoded_video_callback_(on_encoded_video_callback), |
| 44 bits_per_second_(bits_per_second) {} | 59 bits_per_second_(bits_per_second) { |
| 60 DCHECK(!on_encoded_video_callback_.is_null()); | |
| 61 } | |
| 45 | 62 |
| 46 virtual void StartFrameEncode(const scoped_refptr<VideoFrame>& frame, | 63 // Start encoding |frame|, returning via |on_encoded_video_callback_|. This |
| 47 base::TimeTicks capture_timestamp) = 0; | 64 // call will also trigger a ConfigureEncoderOnEncodingThread() upon first |
| 65 // frame arrival or parameter change, and an EncodeOnEncodingThread() to | |
| 66 // actually encode the frame. | |
| 67 void StartFrameEncode(const scoped_refptr<VideoFrame>& frame, | |
| 68 base::TimeTicks capture_timestamp); | |
| 48 | 69 |
| 49 void set_paused(bool paused) { paused_ = paused; } | 70 void SetPaused(bool paused); |
| 50 | 71 |
| 51 protected: | 72 protected: |
| 52 friend class base::RefCountedThreadSafe<Encoder>; | 73 friend class base::RefCountedThreadSafe<Encoder>; |
| 53 virtual ~Encoder() {} | 74 virtual ~Encoder() {} |
| 54 | 75 |
| 55 // While |paused_|, frames are not encoded. | 76 virtual void EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame, |
| 77 base::TimeTicks capture_timestamp) = 0; | |
| 78 virtual void ConfigureEncoderOnEncodingThread(const gfx::Size& size) = 0; | |
| 79 | |
| 80 // Used to shutdown properly on the same thread we were created. | |
| 81 const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_; | |
| 82 | |
| 83 // Task runner where frames to encode and reply callbacks must happen. | |
| 84 scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_; | |
| 85 | |
| 86 // Thread for encoding. Active for the lifetime of VpxEncoder. | |
| 87 std::unique_ptr<base::Thread> encoding_thread_; | |
| 88 | |
| 89 // While |paused_|, frames are not encoded. Used only from |encoding_thread_|. | |
| 56 bool paused_; | 90 bool paused_; |
| 57 | 91 |
| 58 // This callback should be exercised on IO thread. | 92 // This callback should be exercised on IO thread. |
| 59 const OnEncodedVideoCB on_encoded_video_callback_; | 93 const OnEncodedVideoCB on_encoded_video_callback_; |
| 60 | 94 |
| 61 // Target bitrate or video encoding. If 0, a standard bitrate is used. | 95 // Target bitrate for video encoding. If 0, a standard bitrate is used. |
| 62 const int32_t bits_per_second_; | 96 const int32_t bits_per_second_; |
| 63 | 97 |
| 64 DISALLOW_COPY_AND_ASSIGN(Encoder); | 98 DISALLOW_COPY_AND_ASSIGN(Encoder); |
| 65 }; | 99 }; |
| 66 | 100 |
| 101 void VideoTrackRecorder::Encoder::StartFrameEncode( | |
| 102 const scoped_refptr<VideoFrame>& frame, | |
| 103 base::TimeTicks capture_timestamp) { | |
| 104 // Cache the thread sending frames on first frame arrival. | |
| 105 if (!origin_task_runner_.get()) | |
| 106 origin_task_runner_ = base::MessageLoop::current()->task_runner(); | |
| 107 DCHECK(origin_task_runner_->BelongsToCurrentThread()); | |
| 108 if (paused_) | |
| 109 return; | |
| 110 encoding_thread_->task_runner()->PostTask( | |
| 111 FROM_HERE, base::Bind(&Encoder::EncodeOnEncodingThread, | |
| 112 this, frame, capture_timestamp)); | |
| 113 } | |
| 114 | |
| 115 void VideoTrackRecorder::Encoder::SetPaused(bool paused) { | |
| 116 if (!encoding_thread_->task_runner()->BelongsToCurrentThread()) { | |
| 117 encoding_thread_->task_runner()->PostTask( | |
| 118 FROM_HERE, base::Bind(&Encoder::SetPaused, this, paused)); | |
|
emircan
2016/04/19 23:21:22
return;
mcasas
2016/04/20 00:37:13
Done.
| |
| 119 } | |
| 120 paused_ = paused; | |
| 121 } | |
| 122 | |
| 67 namespace { | 123 namespace { |
| 68 | 124 |
| 69 const vpx_codec_flags_t kNoFlags = 0; | |
| 70 | |
| 71 // Originally from remoting/codec/scoped_vpx_codec.h. | 125 // Originally from remoting/codec/scoped_vpx_codec.h. |
| 72 // TODO(mcasas): Refactor into a common location. | 126 // TODO(mcasas): Refactor into a common location. |
| 73 struct VpxCodecDeleter { | 127 struct VpxCodecDeleter { |
| 74 void operator()(vpx_codec_ctx_t* codec) { | 128 void operator()(vpx_codec_ctx_t* codec) { |
| 75 if (!codec) | 129 if (!codec) |
| 76 return; | 130 return; |
| 77 vpx_codec_err_t ret = vpx_codec_destroy(codec); | 131 vpx_codec_err_t ret = vpx_codec_destroy(codec); |
| 78 CHECK_EQ(ret, VPX_CODEC_OK); | 132 CHECK_EQ(ret, VPX_CODEC_OK); |
| 79 delete codec; | 133 delete codec; |
| 80 } | 134 } |
| 81 }; | 135 }; |
| 82 | |
| 83 typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr; | 136 typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr; |
| 84 | 137 |
| 85 void OnFrameEncodeCompleted( | 138 static void OnFrameEncodeCompleted( |
| 86 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb, | 139 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb, |
| 87 const scoped_refptr<VideoFrame>& frame, | 140 const scoped_refptr<VideoFrame>& frame, |
| 88 std::unique_ptr<std::string> data, | 141 std::unique_ptr<std::string> data, |
| 89 base::TimeTicks capture_timestamp, | 142 base::TimeTicks capture_timestamp, |
| 90 bool keyframe) { | 143 bool keyframe) { |
| 91 DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, " | 144 DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, " |
| 92 << capture_timestamp << " ms"; | 145 << capture_timestamp << " ms"; |
| 93 on_encoded_video_cb.Run(frame, std::move(data), capture_timestamp, keyframe); | 146 on_encoded_video_cb.Run(frame, std::move(data), capture_timestamp, keyframe); |
| 94 } | 147 } |
| 95 | 148 |
| 96 // Class encapsulating libvpx interactions, encoding and delivery of received | 149 static int GetNumberOfThreadsForEncoding() { |
| 97 // frames. This class: | 150 // Do not saturate CPU utilization just for encoding. On a lower-end system |
| 98 // - is created and destroyed on its parent's thread (usually the main Render | 151 // with only 1 or 2 cores, use only one thread for encoding. On systems with |
| 99 // thread); | 152 // more cores, allow half of the cores to be used for encoding. |
| 100 // - receives VideoFrames and Run()s the callbacks on |origin_task_runner_|, | 153 return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2); |
| 101 // which is cached on first frame arrival, and is supposed to be the render IO | 154 } |
| 102 // thread, but this is not enforced; | 155 |
| 103 // - uses an internal |encoding_thread_| for libvpx interactions, notably for | 156 // Class encapsulating all libvpx interactions for VP8/VP9 encoding. |
| 104 // encoding (which might take some time). | |
| 105 class VpxEncoder final : public VideoTrackRecorder::Encoder { | 157 class VpxEncoder final : public VideoTrackRecorder::Encoder { |
| 106 public: | 158 public: |
| 107 static void ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, | 159 static void ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, |
| 108 ScopedVpxCodecCtxPtr encoder); | 160 ScopedVpxCodecCtxPtr encoder); |
| 109 | 161 |
| 110 VpxEncoder( | 162 VpxEncoder( |
| 111 bool use_vp9, | 163 bool use_vp9, |
| 112 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, | 164 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, |
| 113 int32_t bits_per_second); | 165 int32_t bits_per_second); |
| 114 | 166 |
| 115 void StartFrameEncode(const scoped_refptr<VideoFrame>& frame, | |
| 116 base::TimeTicks capture_timestamp) override; | |
| 117 | |
| 118 private: | 167 private: |
| 168 // VideoTrackRecorder::Encoder | |
| 119 ~VpxEncoder() override; | 169 ~VpxEncoder() override; |
| 120 | |
| 121 void EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame, | 170 void EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame, |
| 122 base::TimeTicks capture_timestamp); | 171 base::TimeTicks capture_timestamp) override; |
| 123 | 172 void ConfigureEncoderOnEncodingThread(const gfx::Size& size) override; |
| 124 void ConfigureEncoding(const gfx::Size& size); | |
| 125 | 173 |
| 126 // Returns true if |codec_config_| has been filled in at least once. | 174 // Returns true if |codec_config_| has been filled in at least once. |
| 127 bool IsInitialized() const; | 175 bool IsInitialized() const; |
| 128 | 176 |
| 129 // Estimate the frame duration from |frame| and |last_frame_timestamp_|. | 177 // Estimate the frame duration from |frame| and |last_frame_timestamp_|. |
| 130 base::TimeDelta CalculateFrameDuration( | 178 base::TimeDelta EstimateFrameDuration(const scoped_refptr<VideoFrame>& frame); |
| 131 const scoped_refptr<VideoFrame>& frame); | |
| 132 | 179 |
| 133 // Force usage of VP9 for encoding, instead of VP8 which is the default. | 180 // Force usage of VP9 for encoding, instead of VP8 which is the default. |
| 134 const bool use_vp9_; | 181 const bool use_vp9_; |
| 135 | 182 |
| 136 // Used to shutdown properly on the same thread we were created. | 183 // VPx internal objects: configuration and encoder. |encoder_| is a special |
| 137 const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_; | 184 // scoped pointer to guarantee proper destruction, particularly when |
| 138 | 185 // reconfiguring due to parameters change. Only used on |encoding_thread_|. |
| 139 // Task runner where frames to encode and reply callbacks must happen. | |
| 140 scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_; | |
| 141 | |
| 142 // Thread for encoding. Active for the lifetime of VpxEncoder. All variables | |
| 143 // below this are used in this thread. | |
| 144 std::unique_ptr<base::Thread> encoding_thread_; | |
| 145 // VP8 internal objects: configuration and encoder. | |
| 146 vpx_codec_enc_cfg_t codec_config_; | 186 vpx_codec_enc_cfg_t codec_config_; |
| 147 // |encoder_| is a special scoped pointer to guarantee proper destruction. | |
| 148 // Again, it should only be accessed on |encoding_thread_|. | |
| 149 ScopedVpxCodecCtxPtr encoder_; | 187 ScopedVpxCodecCtxPtr encoder_; |
| 150 | 188 |
| 151 // The |VideoFrame::timestamp()| of the last encoded frame. This is used to | 189 // The |VideoFrame::timestamp()| of the last encoded frame. This is used to |
| 152 // predict the duration of the next frame. | 190 // predict the duration of the next frame. Only used on |encoding_thread_|. |
| 153 base::TimeDelta last_frame_timestamp_; | 191 base::TimeDelta last_frame_timestamp_; |
| 154 | 192 |
| 155 DISALLOW_COPY_AND_ASSIGN(VpxEncoder); | 193 DISALLOW_COPY_AND_ASSIGN(VpxEncoder); |
| 156 }; | 194 }; |
| 157 | 195 |
| 196 #if BUILDFLAG(RTC_USE_H264) | |
| 197 | |
| 198 struct ISVCEncoderDeleter { | |
| 199 void operator()(ISVCEncoder* codec) { | |
| 200 if (!codec) | |
| 201 return; | |
| 202 const int uninit_ret = codec->Uninitialize(); | |
| 203 CHECK_EQ(cmResultSuccess, uninit_ret); | |
| 204 WelsDestroySVCEncoder(codec); | |
| 205 } | |
| 206 }; | |
| 207 typedef std::unique_ptr<ISVCEncoder, ISVCEncoderDeleter> ScopedISVCEncoderPtr; | |
| 208 | |
| 209 // Class encapsulating all openh264 interactions for H264 encoding. | |
| 210 class H264Encoder final : public VideoTrackRecorder::Encoder { | |
| 211 public: | |
| 212 static void ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, | |
| 213 ScopedISVCEncoderPtr encoder); | |
|
emircan
2016/04/19 23:21:21
Can we make this a (non-static) virtual method and
mcasas
2016/04/20 00:37:12
I'd love to but can't because in VpxEncoder
class
emircan
2016/04/20 17:26:20
Ok, sounds good.
| |
| 214 | |
| 215 H264Encoder( | |
| 216 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, | |
| 217 int32_t bits_per_second); | |
| 218 | |
| 219 private: | |
| 220 // VideoTrackRecorder::Encoder | |
| 221 ~H264Encoder() override; | |
| 222 void EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame, | |
| 223 base::TimeTicks capture_timestamp) override; | |
| 224 void ConfigureEncoderOnEncodingThread(const gfx::Size& size) override; | |
| 225 | |
| 226 // |openh264_encoder_| is a special scoped pointer to guarantee proper | |
| 227 // destruction, also when reconfiguring due to parameters change. Only used on | |
| 228 // |encoding_thread_|. | |
| 229 gfx::Size configured_size_; | |
| 230 ScopedISVCEncoderPtr openh264_encoder_; | |
| 231 | |
| 232 // The |VideoFrame::timestamp()| of the first received frame. Only used on | |
| 233 // |encoding_thread_|. | |
| 234 base::TimeTicks first_frame_timestamp_; | |
| 235 | |
| 236 DISALLOW_COPY_AND_ASSIGN(H264Encoder); | |
| 237 }; | |
| 238 | |
| 239 #endif // #if BUILDFLAG(RTC_USE_H264) | |
| 240 | |
| 158 // static | 241 // static |
| 159 void VpxEncoder::ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, | 242 void VpxEncoder::ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, |
| 160 ScopedVpxCodecCtxPtr encoder) { | 243 ScopedVpxCodecCtxPtr encoder) { |
| 161 DCHECK(encoding_thread->IsRunning()); | 244 DCHECK(encoding_thread->IsRunning()); |
| 162 encoding_thread->Stop(); | 245 encoding_thread->Stop(); |
| 163 // Both |encoding_thread| and |encoder| will be destroyed at end-of-scope. | 246 // Both |encoding_thread| and |encoder| will be destroyed at end-of-scope. |
| 164 } | 247 } |
| 165 | 248 |
| 166 VpxEncoder::VpxEncoder( | 249 VpxEncoder::VpxEncoder( |
| 167 bool use_vp9, | 250 bool use_vp9, |
| 168 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, | 251 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, |
| 169 int32_t bits_per_second) | 252 int32_t bits_per_second) |
| 170 : Encoder(on_encoded_video_callback, bits_per_second), | 253 : Encoder(on_encoded_video_callback, bits_per_second), |
| 171 use_vp9_(use_vp9), | 254 use_vp9_(use_vp9) { |
| 172 main_task_runner_(base::MessageLoop::current()->task_runner()), | |
| 173 encoding_thread_(new base::Thread("EncodingThread")) { | |
| 174 DCHECK(!on_encoded_video_callback_.is_null()); | |
| 175 | |
| 176 codec_config_.g_timebase.den = 0; // Not initialized. | 255 codec_config_.g_timebase.den = 0; // Not initialized. |
| 177 | 256 |
| 178 DCHECK(!encoding_thread_->IsRunning()); | 257 DCHECK(!encoding_thread_->IsRunning()); |
| 179 encoding_thread_->Start(); | 258 encoding_thread_->Start(); |
| 180 } | 259 } |
| 181 | 260 |
| 182 VpxEncoder::~VpxEncoder() { | 261 VpxEncoder::~VpxEncoder() { |
| 183 main_task_runner_->PostTask(FROM_HERE, | 262 main_task_runner_->PostTask(FROM_HERE, |
| 184 base::Bind(&VpxEncoder::ShutdownEncoder, | 263 base::Bind(&VpxEncoder::ShutdownEncoder, |
| 185 base::Passed(&encoding_thread_), | 264 base::Passed(&encoding_thread_), |
| 186 base::Passed(&encoder_))); | 265 base::Passed(&encoder_))); |
| 187 } | 266 } |
| 188 | 267 |
| 189 void VpxEncoder::StartFrameEncode(const scoped_refptr<VideoFrame>& frame, | |
| 190 base::TimeTicks capture_timestamp) { | |
| 191 // Cache the thread sending frames on first frame arrival. | |
| 192 if (!origin_task_runner_.get()) | |
| 193 origin_task_runner_ = base::MessageLoop::current()->task_runner(); | |
| 194 DCHECK(origin_task_runner_->BelongsToCurrentThread()); | |
| 195 if (paused_) | |
| 196 return; | |
| 197 encoding_thread_->task_runner()->PostTask( | |
| 198 FROM_HERE, base::Bind(&VpxEncoder::EncodeOnEncodingThread, | |
| 199 this, frame, capture_timestamp)); | |
| 200 } | |
| 201 | |
| 202 void VpxEncoder::EncodeOnEncodingThread( | 268 void VpxEncoder::EncodeOnEncodingThread( |
| 203 const scoped_refptr<VideoFrame>& video_frame, | 269 const scoped_refptr<VideoFrame>& video_frame, |
| 204 base::TimeTicks capture_timestamp) { | 270 base::TimeTicks capture_timestamp) { |
| 205 TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingThread"); | 271 TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingThread"); |
| 206 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); | 272 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); |
| 207 | 273 |
| 208 if (!(video_frame->format() == media::PIXEL_FORMAT_I420 || | 274 if (!(video_frame->format() == media::PIXEL_FORMAT_I420 || |
| 209 video_frame->format() == media::PIXEL_FORMAT_YV12 || | 275 video_frame->format() == media::PIXEL_FORMAT_YV12 || |
| 210 video_frame->format() == media::PIXEL_FORMAT_YV12A)) { | 276 video_frame->format() == media::PIXEL_FORMAT_YV12A)) { |
| 211 NOTREACHED(); | 277 NOTREACHED(); |
| 212 return; | 278 return; |
| 213 } | 279 } |
| 214 scoped_refptr<media::VideoFrame> frame = video_frame; | 280 scoped_refptr<media::VideoFrame> frame = video_frame; |
| 215 // Drop alpha channel since we do not support it yet. | 281 // Drop alpha channel since we do not support it yet. |
| 216 if (frame->format() == media::PIXEL_FORMAT_YV12A) | 282 if (frame->format() == media::PIXEL_FORMAT_YV12A) |
| 217 frame = media::WrapAsI420VideoFrame(video_frame); | 283 frame = media::WrapAsI420VideoFrame(video_frame); |
| 218 | 284 |
| 219 const gfx::Size frame_size = frame->visible_rect().size(); | 285 const gfx::Size frame_size = frame->visible_rect().size(); |
| 220 if (!IsInitialized() || | 286 if (!IsInitialized() || |
| 221 gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) { | 287 gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) { |
| 222 ConfigureEncoding(frame_size); | 288 ConfigureEncoderOnEncodingThread(frame_size); |
| 223 } | 289 } |
| 224 | 290 |
| 225 vpx_image_t vpx_image; | 291 vpx_image_t vpx_image; |
| 226 vpx_image_t* const result = vpx_img_wrap(&vpx_image, | 292 vpx_image_t* const result = vpx_img_wrap(&vpx_image, |
| 227 VPX_IMG_FMT_I420, | 293 VPX_IMG_FMT_I420, |
| 228 frame_size.width(), | 294 frame_size.width(), |
| 229 frame_size.height(), | 295 frame_size.height(), |
| 230 1 /* align */, | 296 1 /* align */, |
| 231 frame->data(VideoFrame::kYPlane)); | 297 frame->data(VideoFrame::kYPlane)); |
| 232 DCHECK_EQ(result, &vpx_image); | 298 DCHECK_EQ(result, &vpx_image); |
| 233 vpx_image.planes[VPX_PLANE_Y] = frame->visible_data(VideoFrame::kYPlane); | 299 vpx_image.planes[VPX_PLANE_Y] = frame->visible_data(VideoFrame::kYPlane); |
| 234 vpx_image.planes[VPX_PLANE_U] = frame->visible_data(VideoFrame::kUPlane); | 300 vpx_image.planes[VPX_PLANE_U] = frame->visible_data(VideoFrame::kUPlane); |
| 235 vpx_image.planes[VPX_PLANE_V] = frame->visible_data(VideoFrame::kVPlane); | 301 vpx_image.planes[VPX_PLANE_V] = frame->visible_data(VideoFrame::kVPlane); |
| 236 vpx_image.stride[VPX_PLANE_Y] = frame->stride(VideoFrame::kYPlane); | 302 vpx_image.stride[VPX_PLANE_Y] = frame->stride(VideoFrame::kYPlane); |
| 237 vpx_image.stride[VPX_PLANE_U] = frame->stride(VideoFrame::kUPlane); | 303 vpx_image.stride[VPX_PLANE_U] = frame->stride(VideoFrame::kUPlane); |
| 238 vpx_image.stride[VPX_PLANE_V] = frame->stride(VideoFrame::kVPlane); | 304 vpx_image.stride[VPX_PLANE_V] = frame->stride(VideoFrame::kVPlane); |
| 239 | 305 |
| 240 const base::TimeDelta duration = CalculateFrameDuration(frame); | 306 const base::TimeDelta duration = EstimateFrameDuration(frame); |
| 241 // Encode the frame. The presentation time stamp argument here is fixed to | 307 // Encode the frame. The presentation time stamp argument here is fixed to |
| 242 // zero to force the encoder to base its single-frame bandwidth calculations | 308 // zero to force the encoder to base its single-frame bandwidth calculations |
| 243 // entirely on |predicted_frame_duration|. | 309 // entirely on |predicted_frame_duration|. |
| 244 const vpx_codec_err_t ret = vpx_codec_encode(encoder_.get(), | 310 const vpx_codec_err_t ret = vpx_codec_encode(encoder_.get(), |
| 245 &vpx_image, | 311 &vpx_image, |
| 246 0 /* pts */, | 312 0 /* pts */, |
| 247 duration.InMicroseconds(), | 313 duration.InMicroseconds(), |
| 248 kNoFlags, | 314 0 /* flags */, |
| 249 VPX_DL_REALTIME); | 315 VPX_DL_REALTIME); |
| 250 DCHECK_EQ(ret, VPX_CODEC_OK) << vpx_codec_err_to_string(ret) << ", #" | 316 DCHECK_EQ(ret, VPX_CODEC_OK) << vpx_codec_err_to_string(ret) << ", #" |
| 251 << vpx_codec_error(encoder_.get()) << " -" | 317 << vpx_codec_error(encoder_.get()) << " -" |
| 252 << vpx_codec_error_detail(encoder_.get()); | 318 << vpx_codec_error_detail(encoder_.get()); |
| 253 | 319 |
| 254 std::unique_ptr<std::string> data(new std::string); | 320 std::unique_ptr<std::string> data(new std::string); |
| 255 bool keyframe = false; | 321 bool keyframe = false; |
| 256 vpx_codec_iter_t iter = NULL; | 322 vpx_codec_iter_t iter = NULL; |
| 257 const vpx_codec_cx_pkt_t* pkt = NULL; | 323 const vpx_codec_cx_pkt_t* pkt = NULL; |
| 258 while ((pkt = vpx_codec_get_cx_data(encoder_.get(), &iter)) != NULL) { | 324 while ((pkt = vpx_codec_get_cx_data(encoder_.get(), &iter)) != NULL) { |
| 259 if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) | 325 if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) |
| 260 continue; | 326 continue; |
| 261 data->assign(static_cast<char*>(pkt->data.frame.buf), pkt->data.frame.sz); | 327 data->assign(static_cast<char*>(pkt->data.frame.buf), pkt->data.frame.sz); |
| 262 keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0; | 328 keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0; |
| 263 break; | 329 break; |
| 264 } | 330 } |
| 265 origin_task_runner_->PostTask(FROM_HERE, | 331 origin_task_runner_->PostTask(FROM_HERE, |
| 266 base::Bind(OnFrameEncodeCompleted, | 332 base::Bind(OnFrameEncodeCompleted, |
| 267 on_encoded_video_callback_, | 333 on_encoded_video_callback_, |
| 268 frame, | 334 frame, |
| 269 base::Passed(&data), | 335 base::Passed(&data), |
| 270 capture_timestamp, | 336 capture_timestamp, |
| 271 keyframe)); | 337 keyframe)); |
| 272 } | 338 } |
| 273 | 339 |
| 274 void VpxEncoder::ConfigureEncoding(const gfx::Size& size) { | 340 void VpxEncoder::ConfigureEncoderOnEncodingThread(const gfx::Size& size) { |
| 275 if (IsInitialized()) { | 341 if (IsInitialized()) { |
| 276 // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less- | 342 // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less- |
| 277 // than-or-equal than the old size, in terms of area, the existing encoder | 343 // than-or-equal than the old size, in terms of area, the existing encoder |
| 278 // instance could be reused after changing |codec_config_.{g_w,g_h}|. | 344 // instance could be reused after changing |codec_config_.{g_w,g_h}|. |
| 279 DVLOG(1) << "Destroying/Re-Creating encoder for new frame size: " | 345 DVLOG(1) << "Destroying/Re-Creating encoder for new frame size: " |
| 280 << gfx::Size(codec_config_.g_w, codec_config_.g_h).ToString() | 346 << gfx::Size(codec_config_.g_w, codec_config_.g_h).ToString() |
| 281 << " --> " << size.ToString() << (use_vp9_ ? " vp9" : " vp8"); | 347 << " --> " << size.ToString() << (use_vp9_ ? " vp9" : " vp8"); |
| 282 encoder_.reset(); | 348 encoder_.reset(); |
| 283 } | 349 } |
| 284 | 350 |
| (...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 329 // max distance out of necessity. | 395 // max distance out of necessity. |
| 330 // Note that due to http://crbug.com/440223, it might be necessary to force a | 396 // Note that due to http://crbug.com/440223, it might be necessary to force a |
| 331 // key frame after 10,000 frames since decoding fails after 30,000 non-key | 397 // key frame after 10,000 frames since decoding fails after 30,000 non-key |
| 332 // frames. | 398 // frames. |
| 333 // Forcing a keyframe in regular intervals also allows seeking in the | 399 // Forcing a keyframe in regular intervals also allows seeking in the |
| 334 // resulting recording with decent performance. | 400 // resulting recording with decent performance. |
| 335 codec_config_.kf_mode = VPX_KF_AUTO; | 401 codec_config_.kf_mode = VPX_KF_AUTO; |
| 336 codec_config_.kf_min_dist = 0; | 402 codec_config_.kf_min_dist = 0; |
| 337 codec_config_.kf_max_dist = 100; | 403 codec_config_.kf_max_dist = 100; |
| 338 | 404 |
| 339 // Do not saturate CPU utilization just for encoding. On a lower-end system | 405 codec_config_.g_threads = GetNumberOfThreadsForEncoding(); |
| 340 // with only 1 or 2 cores, use only one thread for encoding. On systems with | |
| 341 // more cores, allow half of the cores to be used for encoding. | |
| 342 codec_config_.g_threads = | |
| 343 std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2); | |
| 344 | 406 |
| 345 // Number of frames to consume before producing output. | 407 // Number of frames to consume before producing output. |
| 346 codec_config_.g_lag_in_frames = 0; | 408 codec_config_.g_lag_in_frames = 0; |
| 347 | 409 |
| 348 DCHECK(!encoder_); | 410 DCHECK(!encoder_); |
| 349 encoder_.reset(new vpx_codec_ctx_t); | 411 encoder_.reset(new vpx_codec_ctx_t); |
| 350 const vpx_codec_err_t ret = vpx_codec_enc_init(encoder_.get(), interface, | 412 const vpx_codec_err_t ret = vpx_codec_enc_init(encoder_.get(), interface, |
| 351 &codec_config_, kNoFlags); | 413 &codec_config_, 0 /* flags */); |
| 352 DCHECK_EQ(VPX_CODEC_OK, ret); | 414 DCHECK_EQ(VPX_CODEC_OK, ret); |
| 353 | 415 |
| 354 if (use_vp9_) { | 416 if (use_vp9_) { |
| 355 // Values of VP8E_SET_CPUUSED greater than 0 will increase encoder speed at | 417 // Values of VP8E_SET_CPUUSED greater than 0 will increase encoder speed at |
| 356 // the expense of quality up to a maximum value of 8 for VP9, by tuning the | 418 // the expense of quality up to a maximum value of 8 for VP9, by tuning the |
| 357 // target time spent encoding the frame. Go from 8 to 5 (values for real | 419 // target time spent encoding the frame. Go from 8 to 5 (values for real |
| 358 // time encoding) depending on the amount of cores available in the system. | 420 // time encoding) depending on the amount of cores available in the system. |
| 359 const int kCpuUsed = | 421 const int kCpuUsed = |
| 360 std::max(5, 8 - base::SysInfo::NumberOfProcessors() / 2); | 422 std::max(5, 8 - base::SysInfo::NumberOfProcessors() / 2); |
| 361 result = vpx_codec_control(encoder_.get(), VP8E_SET_CPUUSED, kCpuUsed); | 423 result = vpx_codec_control(encoder_.get(), VP8E_SET_CPUUSED, kCpuUsed); |
| 362 DLOG_IF(WARNING, VPX_CODEC_OK != result) << "VP8E_SET_CPUUSED failed"; | 424 DLOG_IF(WARNING, VPX_CODEC_OK != result) << "VP8E_SET_CPUUSED failed"; |
| 363 } | 425 } |
| 364 } | 426 } |
| 365 | 427 |
| 366 bool VpxEncoder::IsInitialized() const { | 428 bool VpxEncoder::IsInitialized() const { |
| 367 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); | 429 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); |
| 368 return codec_config_.g_timebase.den != 0; | 430 return codec_config_.g_timebase.den != 0; |
| 369 } | 431 } |
| 370 | 432 |
| 371 base::TimeDelta VpxEncoder::CalculateFrameDuration( | 433 base::TimeDelta VpxEncoder::EstimateFrameDuration( |
| 372 const scoped_refptr<VideoFrame>& frame) { | 434 const scoped_refptr<VideoFrame>& frame) { |
| 373 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); | 435 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); |
| 374 | 436 |
| 375 using base::TimeDelta; | 437 using base::TimeDelta; |
| 376 TimeDelta predicted_frame_duration; | 438 TimeDelta predicted_frame_duration; |
| 377 if (!frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION, | 439 if (!frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION, |
| 378 &predicted_frame_duration) || | 440 &predicted_frame_duration) || |
| 379 predicted_frame_duration <= TimeDelta()) { | 441 predicted_frame_duration <= TimeDelta()) { |
| 380 // The source of the video frame did not provide the frame duration. Use | 442 // The source of the video frame did not provide the frame duration. Use |
| 381 // the actual amount of time between the current and previous frame as a | 443 // the actual amount of time between the current and previous frame as a |
| 382 // prediction for the next frame's duration. | 444 // prediction for the next frame's duration. |
| 383 // TODO(mcasas): This duration estimation could lead to artifacts if the | 445 // TODO(mcasas): This duration estimation could lead to artifacts if the |
| 384 // cadence of the received stream is compromised (e.g. camera freeze, pause, | 446 // cadence of the received stream is compromised (e.g. camera freeze, pause, |
| 385 // remote packet loss). Investigate using GetFrameRate() in this case. | 447 // remote packet loss). Investigate using GetFrameRate() in this case. |
| 386 predicted_frame_duration = frame->timestamp() - last_frame_timestamp_; | 448 predicted_frame_duration = frame->timestamp() - last_frame_timestamp_; |
| 387 } | 449 } |
| 388 last_frame_timestamp_ = frame->timestamp(); | 450 last_frame_timestamp_ = frame->timestamp(); |
| 389 // Make sure |predicted_frame_duration| is in a safe range of values. | 451 // Make sure |predicted_frame_duration| is in a safe range of values. |
| 390 const TimeDelta kMaxFrameDuration = TimeDelta::FromSecondsD(1.0 / 8); | 452 const TimeDelta kMaxFrameDuration = TimeDelta::FromSecondsD(1.0 / 8); |
| 391 const TimeDelta kMinFrameDuration = TimeDelta::FromMilliseconds(1); | 453 const TimeDelta kMinFrameDuration = TimeDelta::FromMilliseconds(1); |
| 392 return std::min(kMaxFrameDuration, std::max(predicted_frame_duration, | 454 return std::min(kMaxFrameDuration, std::max(predicted_frame_duration, |
| 393 kMinFrameDuration)); | 455 kMinFrameDuration)); |
| 394 } | 456 } |
| 395 | 457 |
| 458 #if BUILDFLAG(RTC_USE_H264) | |
| 459 | |
| 460 // static | |
| 461 void H264Encoder::ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, | |
| 462 ScopedISVCEncoderPtr encoder) { | |
| 463 DCHECK(encoding_thread->IsRunning()); | |
| 464 encoding_thread->Stop(); | |
| 465 // Both |encoding_thread| and |encoder| will be destroyed at end-of-scope. | |
| 466 } | |
| 467 | |
| 468 H264Encoder::H264Encoder( | |
| 469 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, | |
| 470 int32_t bits_per_second) | |
| 471 : Encoder(on_encoded_video_callback, bits_per_second) { | |
| 472 DCHECK(!encoding_thread_->IsRunning()); | |
| 473 encoding_thread_->Start(); | |
| 474 } | |
| 475 | |
| 476 H264Encoder::~H264Encoder() { | |
| 477 main_task_runner_->PostTask(FROM_HERE, | |
| 478 base::Bind(&H264Encoder::ShutdownEncoder, | |
| 479 base::Passed(&encoding_thread_), | |
| 480 base::Passed(&openh264_encoder_))); | |
| 481 } | |
| 482 | |
| 483 void H264Encoder::EncodeOnEncodingThread( | |
| 484 const scoped_refptr<VideoFrame>& video_frame, | |
| 485 base::TimeTicks capture_timestamp) { | |
| 486 TRACE_EVENT0("video", "H264Encoder::EncodeOnEncodingThread"); | |
| 487 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); | |
| 488 | |
| 489 if (!(video_frame->format() == media::PIXEL_FORMAT_I420 || | |
| 490 video_frame->format() == media::PIXEL_FORMAT_YV12 || | |
| 491 video_frame->format() == media::PIXEL_FORMAT_YV12A)) { | |
| 492 NOTREACHED(); | |
| 493 return; | |
| 494 } | |
| 495 scoped_refptr<media::VideoFrame> frame = video_frame; | |
| 496 // Drop alpha channel since we do not support it yet. | |
| 497 if (frame->format() == media::PIXEL_FORMAT_YV12A) | |
| 498 frame = media::WrapAsI420VideoFrame(video_frame); | |
|
emircan
2016/04/19 23:21:21
Move l.489-498 and l.274-283 into VideoTrackRecorder
mcasas
2016/04/20 00:37:12
Done.
| |
| 499 | |
| 500 const gfx::Size frame_size = frame->visible_rect().size(); | |
| 501 if (!openh264_encoder_ || configured_size_ != frame_size) { | |
| 502 ConfigureEncoderOnEncodingThread(frame_size); | |
| 503 first_frame_timestamp_ = capture_timestamp; | |
| 504 } | |
| 505 | |
| 506 SSourcePicture picture = {}; | |
| 507 picture.iPicWidth = frame_size.width(); | |
| 508 picture.iPicHeight = frame_size.height(); | |
| 509 picture.iColorFormat = EVideoFormatType::videoFormatI420; | |
| 510 picture.uiTimeStamp = | |
| 511 (capture_timestamp - first_frame_timestamp_).InMilliseconds(); | |
| 512 picture.iStride[0] = frame->stride(VideoFrame::kYPlane); | |
| 513 picture.iStride[1] = frame->stride(VideoFrame::kUPlane); | |
| 514 picture.iStride[2] = frame->stride(VideoFrame::kVPlane); | |
| 515 picture.pData[0] = frame->data(VideoFrame::kYPlane); | |
| 516 picture.pData[1] = frame->data(VideoFrame::kUPlane); | |
| 517 picture.pData[2] = frame->data(VideoFrame::kVPlane); | |
| 518 | |
| 519 SFrameBSInfo info = {}; | |
| 520 if (openh264_encoder_->EncodeFrame(&picture, &info) != cmResultSuccess) { | |
| 521 NOTREACHED() << "OpenH264 encoding failed"; | |
| 522 return; | |
| 523 } | |
| 524 | |
| 525 std::unique_ptr<std::string> data(new std::string); | |
| 526 const uint8_t kNALStartCode[4] = {0, 0, 0, 1}; | |
| 527 for (int layer = 0; layer < info.iLayerNum; ++layer) { | |
| 528 const SLayerBSInfo& layerInfo = info.sLayerInfo[layer]; | |
| 529 // Iterate NAL units making up this layer, noting fragments. | |
| 530 size_t layer_len = 0; | |
| 531 for (int nal = 0; nal < layerInfo.iNalCount; ++nal) { | |
| 532 // The following DCHECKs make sure that the header of each NAL is OK. | |
| 533 DCHECK_GE(layerInfo.pNalLengthInByte[nal], 4); | |
| 534 DCHECK_EQ(kNALStartCode[0], layerInfo.pBsBuf[layer_len+0]); | |
| 535 DCHECK_EQ(kNALStartCode[1], layerInfo.pBsBuf[layer_len+1]); | |
| 536 DCHECK_EQ(kNALStartCode[2], layerInfo.pBsBuf[layer_len+2]); | |
| 537 DCHECK_EQ(kNALStartCode[3], layerInfo.pBsBuf[layer_len+3]); | |
| 538 | |
| 539 layer_len += layerInfo.pNalLengthInByte[nal]; | |
| 540 } | |
| 541 // Copy the entire layer's data (including NAL start codes). | |
| 542 data->append(reinterpret_cast<char*>(layerInfo.pBsBuf), layer_len); | |
| 543 } | |
| 544 | |
| 545 const bool is_key_frame = info.eFrameType == videoFrameTypeIDR; | |
| 546 origin_task_runner_->PostTask( | |
| 547 FROM_HERE, | |
| 548 base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_, frame, | |
| 549 base::Passed(&data), capture_timestamp, is_key_frame)); | |
| 550 } | |
| 551 | |
| 552 void H264Encoder::ConfigureEncoderOnEncodingThread(const gfx::Size& size) { | |
|
emircan
2016/04/19 23:21:22
DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread());
mcasas
2016/04/20 00:37:13
Done.
| |
| 553 ISVCEncoder* temp_encoder = nullptr; | |
| 554 if (WelsCreateSVCEncoder(&temp_encoder) != 0) { | |
| 555 NOTREACHED() << "Failed to create OpenH264 encoder"; | |
| 556 return; | |
| 557 } | |
| 558 openh264_encoder_.reset(temp_encoder); | |
| 559 configured_size_ = size; | |
| 560 | |
| 561 #if DCHECK_IS_ON() | |
| 562 int trace_level = WELS_LOG_INFO; | |
| 563 openh264_encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level); | |
| 564 #endif | |
| 565 | |
| 566 SEncParamExt init_params; | |
| 567 openh264_encoder_->GetDefaultParams(&init_params); | |
| 568 init_params.iUsageType = CAMERA_VIDEO_REAL_TIME; | |
| 569 | |
| 570 DCHECK_EQ(AUTO_REF_PIC_COUNT, init_params.iNumRefFrame); | |
| 571 DCHECK(!init_params.bSimulcastAVC); | |
| 572 | |
| 573 init_params.uiIntraPeriod = 100; // Same as for VpxEncoder. | |
| 574 init_params.iPicWidth = size.width(); | |
| 575 init_params.iPicHeight = size.height(); | |
| 576 | |
| 577 DCHECK_EQ(RC_QUALITY_MODE, init_params.iRCMode); | |
| 578 DCHECK_EQ(0, init_params.iPaddingFlag); | |
| 579 DCHECK_EQ(UNSPECIFIED_BIT_RATE, init_params.iTargetBitrate); | |
| 580 DCHECK_EQ(UNSPECIFIED_BIT_RATE, init_params.iMaxBitrate); | |
| 581 if (bits_per_second_ > 0) { | |
| 582 init_params.iRCMode = RC_BITRATE_MODE; | |
| 583 init_params.iTargetBitrate = bits_per_second_; | |
| 584 } else { | |
| 585 init_params.iRCMode = RC_OFF_MODE; | |
| 586 } | |
| 587 | |
| 588 // Threading model: 0 means auto, here we select explicitly. | |
| 589 init_params.iMultipleThreadIdc = GetNumberOfThreadsForEncoding(); | |
| 590 | |
| 591 // TODO(mcasas): consider reducing complexity if there are few CPUs available. | |
| 592 DCHECK_EQ(MEDIUM_COMPLEXITY, init_params.iComplexityMode); | |
| 593 DCHECK(!init_params.bEnableDenoise); | |
| 594 DCHECK(init_params.bEnableFrameSkip); | |
| 595 | |
| 596 // The base spatial layer 0 is the only one we use. | |
| 597 DCHECK_EQ(1, init_params.iSpatialLayerNum); | |
| 598 init_params.sSpatialLayers[0].iVideoWidth = init_params.iPicWidth; | |
| 599 init_params.sSpatialLayers[0].iVideoHeight = init_params.iPicHeight; | |
| 600 init_params.sSpatialLayers[0].iSpatialBitrate = init_params.iTargetBitrate; | |
| 601 // Slice num according to number of threads. | |
| 602 init_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = SM_AUTO_SLICE; | |
| 603 | |
| 604 if (openh264_encoder_->InitializeExt(&init_params) != cmResultSuccess) { | |
| 605 NOTREACHED() << "Failed to initialize OpenH264 encoder"; | |
| 606 return; | |
| 607 } | |
| 608 | |
| 609 int pixel_format = EVideoFormatType::videoFormatI420; | |
| 610 openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, &pixel_format); | |
| 611 } | |
| 612 #endif  // BUILDFLAG(RTC_USE_H264) | |
| 613 | |
| 396 } // anonymous namespace | 614 } // anonymous namespace |
| 397 | 615 |
| 398 VideoTrackRecorder::VideoTrackRecorder( | 616 VideoTrackRecorder::VideoTrackRecorder( |
| 399 CodecId codec, | 617 CodecId codec, |
| 400 const blink::WebMediaStreamTrack& track, | 618 const blink::WebMediaStreamTrack& track, |
| 401 const OnEncodedVideoCB& on_encoded_video_callback, | 619 const OnEncodedVideoCB& on_encoded_video_callback, |
| 402 int32_t bits_per_second) | 620 int32_t bits_per_second) |
| 403 : track_(track), | 621 : track_(track) { |
| 404 encoder_(new VpxEncoder(codec == CodecId::VP9, | |
| 405 on_encoded_video_callback, | |
| 406 bits_per_second)) { | |
| 407 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 622 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
| 408 DCHECK(!track_.isNull()); | 623 DCHECK(!track_.isNull()); |
| 409 DCHECK(track_.getExtraData()); | 624 DCHECK(track_.getExtraData()); |
| 410 | 625 |
| 626 switch (codec) { | |
| 627 #if BUILDFLAG(RTC_USE_H264) | |
| 628 case CodecId::H264: | |
| 629 encoder_ = new H264Encoder(on_encoded_video_callback, bits_per_second); | |
| 630 break; | |
| 631 #endif | |
| 632 case CodecId::VP8: | |
| 633 case CodecId::VP9: | |
| 634 encoder_ = new VpxEncoder(codec == CodecId::VP9, | |
| 635 on_encoded_video_callback, bits_per_second); | |
| 636 break; | |
| 637 default: | |
| 638 NOTREACHED() << "Unsupported codec"; | |
| 639 } | |
| 640 | |
| 411 // StartFrameEncode() will be called on Render IO thread. | 641 // StartFrameEncode() will be called on Render IO thread. |
| 412 MediaStreamVideoSink::ConnectToTrack( | 642 MediaStreamVideoSink::ConnectToTrack( |
| 413 track_, | 643 track_, |
| 414 base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_)); | 644 base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_)); |
| 415 } | 645 } |
| 416 | 646 |
| 417 VideoTrackRecorder::~VideoTrackRecorder() { | 647 VideoTrackRecorder::~VideoTrackRecorder() { |
| 418 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 648 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
| 419 MediaStreamVideoSink::DisconnectFromTrack(); | 649 MediaStreamVideoSink::DisconnectFromTrack(); |
| 420 track_.reset(); | 650 track_.reset(); |
| 421 } | 651 } |
| 422 | 652 |
| 423 void VideoTrackRecorder::Pause() { | 653 void VideoTrackRecorder::Pause() { |
| 424 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 654 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
| 425 DCHECK(encoder_); | 655 DCHECK(encoder_); |
| 426 encoder_->set_paused(true); | 656 encoder_->SetPaused(true); |
| 427 } | 657 } |
| 428 | 658 |
| 429 void VideoTrackRecorder::Resume() { | 659 void VideoTrackRecorder::Resume() { |
| 430 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 660 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
| 431 DCHECK(encoder_); | 661 DCHECK(encoder_); |
| 432 encoder_->set_paused(false); | 662 encoder_->SetPaused(false); |
| 433 } | 663 } |
| 434 | 664 |
| 435 void VideoTrackRecorder::OnVideoFrameForTesting( | 665 void VideoTrackRecorder::OnVideoFrameForTesting( |
| 436 const scoped_refptr<media::VideoFrame>& frame, | 666 const scoped_refptr<media::VideoFrame>& frame, |
| 437 base::TimeTicks timestamp) { | 667 base::TimeTicks timestamp) { |
| 438 encoder_->StartFrameEncode(frame, timestamp); | 668 encoder_->StartFrameEncode(frame, timestamp); |
| 439 } | 669 } |
| 440 | 670 |
| 441 } // namespace content | 671 } // namespace content |
| OLD | NEW |