OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/video_track_recorder.h" | 5 #include "content/renderer/media/video_track_recorder.h" |
6 | 6 |
7 #include <utility> | 7 #include <utility> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/logging.h" | 10 #include "base/logging.h" |
11 #include "base/macros.h" | 11 #include "base/macros.h" |
12 #include "base/sys_info.h" | 12 #include "base/sys_info.h" |
13 #include "base/threading/thread.h" | 13 #include "base/threading/thread.h" |
14 #include "base/time/time.h" | 14 #include "base/time/time.h" |
15 #include "base/trace_event/trace_event.h" | 15 #include "base/trace_event/trace_event.h" |
16 #include "media/base/video_frame.h" | 16 #include "media/base/video_frame.h" |
17 #include "media/base/video_util.h" | 17 #include "media/base/video_util.h" |
18 #include "ui/gfx/geometry/size.h" | 18 #include "ui/gfx/geometry/size.h" |
19 | 19 |
| 20 #if BUILDFLAG(RTC_USE_H264) |
| 21 #include "third_party/openh264/src/codec/api/svc/codec_api.h" |
| 22 #include "third_party/openh264/src/codec/api/svc/codec_app_def.h" |
| 23 #include "third_party/openh264/src/codec/api/svc/codec_def.h" |
| 24 #endif // #if BUILDFLAG(RTC_USE_H264) |
| 25 |
20 extern "C" { | 26 extern "C" { |
21 // VPX_CODEC_DISABLE_COMPAT excludes parts of the libvpx API that provide | 27 // VPX_CODEC_DISABLE_COMPAT excludes parts of the libvpx API that provide |
22 // backwards compatibility for legacy applications using the library. | 28 // backwards compatibility for legacy applications using the library. |
23 #define VPX_CODEC_DISABLE_COMPAT 1 | 29 #define VPX_CODEC_DISABLE_COMPAT 1 |
24 #include "third_party/libvpx/source/libvpx/vpx/vp8cx.h" | 30 #include "third_party/libvpx/source/libvpx/vpx/vp8cx.h" |
25 #include "third_party/libvpx/source/libvpx/vpx/vpx_encoder.h" | 31 #include "third_party/libvpx/source/libvpx/vpx/vpx_encoder.h" |
26 } | 32 } |
27 | 33 |
28 using media::VideoFrame; | 34 using media::VideoFrame; |
29 using media::VideoFrameMetadata; | 35 using media::VideoFrameMetadata; |
30 | 36 |
31 namespace content { | 37 namespace content { |
32 | 38 |
33 // Base class to describe a generic Encoder. This class is used to encapsulate | 39 // Base class to describe a generic Encoder, encapsulating all actual encoder |
34 // interactions with actual encoders, encoding and delivery of received frames. | 40 // (re)configurations, encoding and delivery of received frames. This class is |
35 // This class is ref-counted to allow the MediaStreamVideoTrack to hold a | 41 // ref-counted to allow the MediaStreamVideoTrack to hold a reference to it (via |
36 // reference to it, via the callback that MediaStreamVideoSink passes along. | 42 // the callback that MediaStreamVideoSink passes along) and to jump back and |
37 // Also, it is quite common that encoders run in a background thread. | 43 // forth to an internal encoder thread. Moreover, this class: |
| 44 // - is created and destroyed on its parent's thread (usually the main Render |
| 45 // thread), |main_task_runner_|. |
| 46 // - receives VideoFrames on |origin_task_runner_| and runs OnEncodedVideoCB on |
| 47 // that thread as well. This task runner is cached on first frame arrival, and |
| 48 // is supposed to be the render IO thread (but this is not enforced); |
| 49 // - uses an internal |encoding_thread_| for actual encoder interactions, namely |
| 50 // configuration, encoding (which might take some time) and destruction. |
38 class VideoTrackRecorder::Encoder : public base::RefCountedThreadSafe<Encoder> { | 51 class VideoTrackRecorder::Encoder : public base::RefCountedThreadSafe<Encoder> { |
39 public: | 52 public: |
40 Encoder(const OnEncodedVideoCB& on_encoded_video_callback, | 53 Encoder(const OnEncodedVideoCB& on_encoded_video_callback, |
41 int32_t bits_per_second) | 54 int32_t bits_per_second) |
42 : paused_(false), | 55 : main_task_runner_(base::MessageLoop::current()->task_runner()), |
| 56 encoding_thread_(new base::Thread("EncodingThread")), |
| 57 paused_(false), |
43 on_encoded_video_callback_(on_encoded_video_callback), | 58 on_encoded_video_callback_(on_encoded_video_callback), |
44 bits_per_second_(bits_per_second) {} | 59 bits_per_second_(bits_per_second) { |
| 60 DCHECK(!on_encoded_video_callback_.is_null()); |
| 61 } |
45 | 62 |
46 virtual void StartFrameEncode(const scoped_refptr<VideoFrame>& frame, | 63 // Start encoding |frame|, returning via |on_encoded_video_callback_|. This |
47 base::TimeTicks capture_timestamp) = 0; | 64 // call will also trigger a ConfigureEncoderOnEncodingThread() upon first |
| 65 // frame arrival or parameter change, and an EncodeOnEncodingThread() to |
| 66 // actually encode the frame. |
| 67 void StartFrameEncode(const scoped_refptr<VideoFrame>& frame, |
| 68 base::TimeTicks capture_timestamp); |
48 | 69 |
49 void set_paused(bool paused) { paused_ = paused; } | 70 void SetPaused(bool paused); |
50 | 71 |
51 protected: | 72 protected: |
52 friend class base::RefCountedThreadSafe<Encoder>; | 73 friend class base::RefCountedThreadSafe<Encoder>; |
53 virtual ~Encoder() {} | 74 virtual ~Encoder() {} |
54 | 75 |
55 // While |paused_|, frames are not encoded. | 76 virtual void EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame, |
| 77 base::TimeTicks capture_timestamp) = 0; |
| 78 virtual void ConfigureEncoderOnEncodingThread(const gfx::Size& size) = 0; |
| 79 |
| 80 // Used to shutdown properly on the same thread we were created. |
| 81 const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_; |
| 82 |
| 83 // Task runner where frames to encode and reply callbacks must happen. |
| 84 scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_; |
| 85 |
 | 86 // Thread for encoding. Active for the lifetime of the Encoder. |
| 87 std::unique_ptr<base::Thread> encoding_thread_; |
| 88 |
 | 89 // While |paused_|, frames are not encoded. Written on |encoding_thread_| via SetPaused(), but also read by StartFrameEncode() on |origin_task_runner_| — TODO(review): confirm this cross-thread access is synchronized.
56 bool paused_; | 90 bool paused_; |
57 | 91 |
58 // This callback should be exercised on IO thread. | 92 // This callback should be exercised on IO thread. |
59 const OnEncodedVideoCB on_encoded_video_callback_; | 93 const OnEncodedVideoCB on_encoded_video_callback_; |
60 | 94 |
61 // Target bitrate or video encoding. If 0, a standard bitrate is used. | 95 // Target bitrate for video encoding. If 0, a standard bitrate is used. |
62 const int32_t bits_per_second_; | 96 const int32_t bits_per_second_; |
63 | 97 |
64 DISALLOW_COPY_AND_ASSIGN(Encoder); | 98 DISALLOW_COPY_AND_ASSIGN(Encoder); |
65 }; | 99 }; |
66 | 100 |
| 101 void VideoTrackRecorder::Encoder::StartFrameEncode( |
| 102 const scoped_refptr<VideoFrame>& video_frame, |
| 103 base::TimeTicks capture_timestamp) { |
| 104 // Cache the thread sending frames on first frame arrival. |
| 105 if (!origin_task_runner_.get()) |
| 106 origin_task_runner_ = base::MessageLoop::current()->task_runner(); |
| 107 DCHECK(origin_task_runner_->BelongsToCurrentThread()); |
| 108 if (paused_) |
| 109 return; |
| 110 |
| 111 if (!(video_frame->format() == media::PIXEL_FORMAT_I420 || |
| 112 video_frame->format() == media::PIXEL_FORMAT_YV12 || |
| 113 video_frame->format() == media::PIXEL_FORMAT_YV12A)) { |
| 114 NOTREACHED(); |
| 115 return; |
| 116 } |
| 117 scoped_refptr<media::VideoFrame> frame = video_frame; |
| 118 // Drop alpha channel since we do not support it yet. |
| 119 if (frame->format() == media::PIXEL_FORMAT_YV12A) |
| 120 frame = media::WrapAsI420VideoFrame(video_frame); |
| 121 |
| 122 encoding_thread_->task_runner()->PostTask( |
| 123 FROM_HERE, base::Bind(&Encoder::EncodeOnEncodingThread, |
| 124 this, frame, capture_timestamp)); |
| 125 } |
| 126 |
| 127 void VideoTrackRecorder::Encoder::SetPaused(bool paused) { |
| 128 if (!encoding_thread_->task_runner()->BelongsToCurrentThread()) { |
| 129 encoding_thread_->task_runner()->PostTask( |
| 130 FROM_HERE, base::Bind(&Encoder::SetPaused, this, paused)); |
| 131 return; |
| 132 } |
| 133 paused_ = paused; |
| 134 } |
| 135 |
67 namespace { | 136 namespace { |
68 | 137 |
69 const vpx_codec_flags_t kNoFlags = 0; | |
70 | |
71 // Originally from remoting/codec/scoped_vpx_codec.h. | 138 // Originally from remoting/codec/scoped_vpx_codec.h. |
72 // TODO(mcasas): Refactor into a common location. | 139 // TODO(mcasas): Refactor into a common location. |
73 struct VpxCodecDeleter { | 140 struct VpxCodecDeleter { |
74 void operator()(vpx_codec_ctx_t* codec) { | 141 void operator()(vpx_codec_ctx_t* codec) { |
75 if (!codec) | 142 if (!codec) |
76 return; | 143 return; |
77 vpx_codec_err_t ret = vpx_codec_destroy(codec); | 144 vpx_codec_err_t ret = vpx_codec_destroy(codec); |
78 CHECK_EQ(ret, VPX_CODEC_OK); | 145 CHECK_EQ(ret, VPX_CODEC_OK); |
79 delete codec; | 146 delete codec; |
80 } | 147 } |
81 }; | 148 }; |
82 | |
83 typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr; | 149 typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr; |
84 | 150 |
85 void OnFrameEncodeCompleted( | 151 static void OnFrameEncodeCompleted( |
86 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb, | 152 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb, |
87 const scoped_refptr<VideoFrame>& frame, | 153 const scoped_refptr<VideoFrame>& frame, |
88 std::unique_ptr<std::string> data, | 154 std::unique_ptr<std::string> data, |
89 base::TimeTicks capture_timestamp, | 155 base::TimeTicks capture_timestamp, |
90 bool keyframe) { | 156 bool keyframe) { |
91 DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, " | 157 DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, " |
92 << capture_timestamp << " ms"; | 158 << capture_timestamp << " ms"; |
93 on_encoded_video_cb.Run(frame, std::move(data), capture_timestamp, keyframe); | 159 on_encoded_video_cb.Run(frame, std::move(data), capture_timestamp, keyframe); |
94 } | 160 } |
95 | 161 |
96 // Class encapsulating libvpx interactions, encoding and delivery of received | 162 static int GetNumberOfThreadsForEncoding() { |
97 // frames. This class: | 163 // Do not saturate CPU utilization just for encoding. On a lower-end system |
98 // - is created and destroyed on its parent's thread (usually the main Render | 164 // with only 1 or 2 cores, use only one thread for encoding. On systems with |
99 // thread); | 165 // more cores, allow half of the cores to be used for encoding. |
100 // - receives VideoFrames and Run()s the callbacks on |origin_task_runner_|, | 166 return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2); |
101 // which is cached on first frame arrival, and is supposed to be the render IO | 167 } |
102 // thread, but this is not enforced; | 168 |
103 // - uses an internal |encoding_thread_| for libvpx interactions, notably for | 169 // Class encapsulating all libvpx interactions for VP8/VP9 encoding. |
104 // encoding (which might take some time). | |
105 class VpxEncoder final : public VideoTrackRecorder::Encoder { | 170 class VpxEncoder final : public VideoTrackRecorder::Encoder { |
106 public: | 171 public: |
107 static void ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, | 172 static void ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, |
108 ScopedVpxCodecCtxPtr encoder); | 173 ScopedVpxCodecCtxPtr encoder); |
109 | 174 |
110 VpxEncoder( | 175 VpxEncoder( |
111 bool use_vp9, | 176 bool use_vp9, |
112 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, | 177 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, |
113 int32_t bits_per_second); | 178 int32_t bits_per_second); |
114 | 179 |
115 void StartFrameEncode(const scoped_refptr<VideoFrame>& frame, | |
116 base::TimeTicks capture_timestamp) override; | |
117 | |
118 private: | 180 private: |
| 181 // VideoTrackRecorder::Encoder |
119 ~VpxEncoder() override; | 182 ~VpxEncoder() override; |
120 | |
121 void EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame, | 183 void EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame, |
122 base::TimeTicks capture_timestamp); | 184 base::TimeTicks capture_timestamp) override; |
123 | 185 void ConfigureEncoderOnEncodingThread(const gfx::Size& size) override; |
124 void ConfigureEncoding(const gfx::Size& size); | |
125 | 186 |
126 // Returns true if |codec_config_| has been filled in at least once. | 187 // Returns true if |codec_config_| has been filled in at least once. |
127 bool IsInitialized() const; | 188 bool IsInitialized() const; |
128 | 189 |
129 // Estimate the frame duration from |frame| and |last_frame_timestamp_|. | 190 // Estimate the frame duration from |frame| and |last_frame_timestamp_|. |
130 base::TimeDelta CalculateFrameDuration( | 191 base::TimeDelta EstimateFrameDuration(const scoped_refptr<VideoFrame>& frame); |
131 const scoped_refptr<VideoFrame>& frame); | |
132 | 192 |
133 // Force usage of VP9 for encoding, instead of VP8 which is the default. | 193 // Force usage of VP9 for encoding, instead of VP8 which is the default. |
134 const bool use_vp9_; | 194 const bool use_vp9_; |
135 | 195 |
136 // Used to shutdown properly on the same thread we were created. | 196 // VPx internal objects: configuration and encoder. |encoder_| is a special |
137 const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_; | 197 // scoped pointer to guarantee proper destruction, particularly when |
138 | 198 // reconfiguring due to parameters change. Only used on |encoding_thread_|. |
139 // Task runner where frames to encode and reply callbacks must happen. | |
140 scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_; | |
141 | |
142 // Thread for encoding. Active for the lifetime of VpxEncoder. All variables | |
143 // below this are used in this thread. | |
144 std::unique_ptr<base::Thread> encoding_thread_; | |
145 // VP8 internal objects: configuration and encoder. | |
146 vpx_codec_enc_cfg_t codec_config_; | 199 vpx_codec_enc_cfg_t codec_config_; |
147 // |encoder_| is a special scoped pointer to guarantee proper destruction. | |
148 // Again, it should only be accessed on |encoding_thread_|. | |
149 ScopedVpxCodecCtxPtr encoder_; | 200 ScopedVpxCodecCtxPtr encoder_; |
150 | 201 |
151 // The |VideoFrame::timestamp()| of the last encoded frame. This is used to | 202 // The |VideoFrame::timestamp()| of the last encoded frame. This is used to |
152 // predict the duration of the next frame. | 203 // predict the duration of the next frame. Only used on |encoding_thread_|. |
153 base::TimeDelta last_frame_timestamp_; | 204 base::TimeDelta last_frame_timestamp_; |
154 | 205 |
155 DISALLOW_COPY_AND_ASSIGN(VpxEncoder); | 206 DISALLOW_COPY_AND_ASSIGN(VpxEncoder); |
156 }; | 207 }; |
157 | 208 |
| 209 #if BUILDFLAG(RTC_USE_H264) |
| 210 |
| 211 struct ISVCEncoderDeleter { |
| 212 void operator()(ISVCEncoder* codec) { |
| 213 if (!codec) |
| 214 return; |
| 215 const int uninit_ret = codec->Uninitialize(); |
| 216 CHECK_EQ(cmResultSuccess, uninit_ret); |
| 217 WelsDestroySVCEncoder(codec); |
| 218 } |
| 219 }; |
| 220 typedef std::unique_ptr<ISVCEncoder, ISVCEncoderDeleter> ScopedISVCEncoderPtr; |
| 221 |
| 222 // Class encapsulating all openh264 interactions for H264 encoding. |
| 223 class H264Encoder final : public VideoTrackRecorder::Encoder { |
| 224 public: |
| 225 static void ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, |
| 226 ScopedISVCEncoderPtr encoder); |
| 227 |
| 228 H264Encoder( |
| 229 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, |
| 230 int32_t bits_per_second); |
| 231 |
| 232 private: |
| 233 // VideoTrackRecorder::Encoder |
| 234 ~H264Encoder() override; |
| 235 void EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame, |
| 236 base::TimeTicks capture_timestamp) override; |
| 237 void ConfigureEncoderOnEncodingThread(const gfx::Size& size) override; |
| 238 |
| 239 // |openh264_encoder_| is a special scoped pointer to guarantee proper |
| 240 // destruction, also when reconfiguring due to parameters change. Only used on |
| 241 // |encoding_thread_|. |
| 242 gfx::Size configured_size_; |
| 243 ScopedISVCEncoderPtr openh264_encoder_; |
| 244 |
| 245 // The |VideoFrame::timestamp()| of the first received frame. Only used on |
| 246 // |encoding_thread_|. |
| 247 base::TimeTicks first_frame_timestamp_; |
| 248 |
| 249 DISALLOW_COPY_AND_ASSIGN(H264Encoder); |
| 250 }; |
| 251 |
| 252 #endif // #if BUILDFLAG(RTC_USE_H264) |
| 253 |
158 // static | 254 // static |
159 void VpxEncoder::ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, | 255 void VpxEncoder::ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, |
160 ScopedVpxCodecCtxPtr encoder) { | 256 ScopedVpxCodecCtxPtr encoder) { |
161 DCHECK(encoding_thread->IsRunning()); | 257 DCHECK(encoding_thread->IsRunning()); |
162 encoding_thread->Stop(); | 258 encoding_thread->Stop(); |
163 // Both |encoding_thread| and |encoder| will be destroyed at end-of-scope. | 259 // Both |encoding_thread| and |encoder| will be destroyed at end-of-scope. |
164 } | 260 } |
165 | 261 |
166 VpxEncoder::VpxEncoder( | 262 VpxEncoder::VpxEncoder( |
167 bool use_vp9, | 263 bool use_vp9, |
168 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, | 264 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, |
169 int32_t bits_per_second) | 265 int32_t bits_per_second) |
170 : Encoder(on_encoded_video_callback, bits_per_second), | 266 : Encoder(on_encoded_video_callback, bits_per_second), |
171 use_vp9_(use_vp9), | 267 use_vp9_(use_vp9) { |
172 main_task_runner_(base::MessageLoop::current()->task_runner()), | |
173 encoding_thread_(new base::Thread("EncodingThread")) { | |
174 DCHECK(!on_encoded_video_callback_.is_null()); | |
175 | |
176 codec_config_.g_timebase.den = 0; // Not initialized. | 268 codec_config_.g_timebase.den = 0; // Not initialized. |
177 | 269 |
178 DCHECK(!encoding_thread_->IsRunning()); | 270 DCHECK(!encoding_thread_->IsRunning()); |
179 encoding_thread_->Start(); | 271 encoding_thread_->Start(); |
180 } | 272 } |
181 | 273 |
182 VpxEncoder::~VpxEncoder() { | 274 VpxEncoder::~VpxEncoder() { |
183 main_task_runner_->PostTask(FROM_HERE, | 275 main_task_runner_->PostTask(FROM_HERE, |
184 base::Bind(&VpxEncoder::ShutdownEncoder, | 276 base::Bind(&VpxEncoder::ShutdownEncoder, |
185 base::Passed(&encoding_thread_), | 277 base::Passed(&encoding_thread_), |
186 base::Passed(&encoder_))); | 278 base::Passed(&encoder_))); |
187 } | 279 } |
188 | 280 |
189 void VpxEncoder::StartFrameEncode(const scoped_refptr<VideoFrame>& frame, | 281 void VpxEncoder::EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame, |
190 base::TimeTicks capture_timestamp) { | 282 base::TimeTicks capture_timestamp) { |
191 // Cache the thread sending frames on first frame arrival. | |
192 if (!origin_task_runner_.get()) | |
193 origin_task_runner_ = base::MessageLoop::current()->task_runner(); | |
194 DCHECK(origin_task_runner_->BelongsToCurrentThread()); | |
195 if (paused_) | |
196 return; | |
197 encoding_thread_->task_runner()->PostTask( | |
198 FROM_HERE, base::Bind(&VpxEncoder::EncodeOnEncodingThread, | |
199 this, frame, capture_timestamp)); | |
200 } | |
201 | |
202 void VpxEncoder::EncodeOnEncodingThread( | |
203 const scoped_refptr<VideoFrame>& video_frame, | |
204 base::TimeTicks capture_timestamp) { | |
205 TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingThread"); | 283 TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingThread"); |
206 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); | 284 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); |
207 | 285 |
208 if (!(video_frame->format() == media::PIXEL_FORMAT_I420 || | |
209 video_frame->format() == media::PIXEL_FORMAT_YV12 || | |
210 video_frame->format() == media::PIXEL_FORMAT_YV12A)) { | |
211 NOTREACHED(); | |
212 return; | |
213 } | |
214 scoped_refptr<media::VideoFrame> frame = video_frame; | |
215 // Drop alpha channel since we do not support it yet. | |
216 if (frame->format() == media::PIXEL_FORMAT_YV12A) | |
217 frame = media::WrapAsI420VideoFrame(video_frame); | |
218 | |
219 const gfx::Size frame_size = frame->visible_rect().size(); | 286 const gfx::Size frame_size = frame->visible_rect().size(); |
220 if (!IsInitialized() || | 287 if (!IsInitialized() || |
221 gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) { | 288 gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) { |
222 ConfigureEncoding(frame_size); | 289 ConfigureEncoderOnEncodingThread(frame_size); |
223 } | 290 } |
224 | 291 |
225 vpx_image_t vpx_image; | 292 vpx_image_t vpx_image; |
226 vpx_image_t* const result = vpx_img_wrap(&vpx_image, | 293 vpx_image_t* const result = vpx_img_wrap(&vpx_image, |
227 VPX_IMG_FMT_I420, | 294 VPX_IMG_FMT_I420, |
228 frame_size.width(), | 295 frame_size.width(), |
229 frame_size.height(), | 296 frame_size.height(), |
230 1 /* align */, | 297 1 /* align */, |
231 frame->data(VideoFrame::kYPlane)); | 298 frame->data(VideoFrame::kYPlane)); |
232 DCHECK_EQ(result, &vpx_image); | 299 DCHECK_EQ(result, &vpx_image); |
233 vpx_image.planes[VPX_PLANE_Y] = frame->visible_data(VideoFrame::kYPlane); | 300 vpx_image.planes[VPX_PLANE_Y] = frame->visible_data(VideoFrame::kYPlane); |
234 vpx_image.planes[VPX_PLANE_U] = frame->visible_data(VideoFrame::kUPlane); | 301 vpx_image.planes[VPX_PLANE_U] = frame->visible_data(VideoFrame::kUPlane); |
235 vpx_image.planes[VPX_PLANE_V] = frame->visible_data(VideoFrame::kVPlane); | 302 vpx_image.planes[VPX_PLANE_V] = frame->visible_data(VideoFrame::kVPlane); |
236 vpx_image.stride[VPX_PLANE_Y] = frame->stride(VideoFrame::kYPlane); | 303 vpx_image.stride[VPX_PLANE_Y] = frame->stride(VideoFrame::kYPlane); |
237 vpx_image.stride[VPX_PLANE_U] = frame->stride(VideoFrame::kUPlane); | 304 vpx_image.stride[VPX_PLANE_U] = frame->stride(VideoFrame::kUPlane); |
238 vpx_image.stride[VPX_PLANE_V] = frame->stride(VideoFrame::kVPlane); | 305 vpx_image.stride[VPX_PLANE_V] = frame->stride(VideoFrame::kVPlane); |
239 | 306 |
240 const base::TimeDelta duration = CalculateFrameDuration(frame); | 307 const base::TimeDelta duration = EstimateFrameDuration(frame); |
241 // Encode the frame. The presentation time stamp argument here is fixed to | 308 // Encode the frame. The presentation time stamp argument here is fixed to |
242 // zero to force the encoder to base its single-frame bandwidth calculations | 309 // zero to force the encoder to base its single-frame bandwidth calculations |
243 // entirely on |predicted_frame_duration|. | 310 // entirely on |predicted_frame_duration|. |
244 const vpx_codec_err_t ret = vpx_codec_encode(encoder_.get(), | 311 const vpx_codec_err_t ret = vpx_codec_encode(encoder_.get(), |
245 &vpx_image, | 312 &vpx_image, |
246 0 /* pts */, | 313 0 /* pts */, |
247 duration.InMicroseconds(), | 314 duration.InMicroseconds(), |
248 kNoFlags, | 315 0 /* flags */, |
249 VPX_DL_REALTIME); | 316 VPX_DL_REALTIME); |
250 DCHECK_EQ(ret, VPX_CODEC_OK) << vpx_codec_err_to_string(ret) << ", #" | 317 DCHECK_EQ(ret, VPX_CODEC_OK) << vpx_codec_err_to_string(ret) << ", #" |
251 << vpx_codec_error(encoder_.get()) << " -" | 318 << vpx_codec_error(encoder_.get()) << " -" |
252 << vpx_codec_error_detail(encoder_.get()); | 319 << vpx_codec_error_detail(encoder_.get()); |
253 | 320 |
254 std::unique_ptr<std::string> data(new std::string); | 321 std::unique_ptr<std::string> data(new std::string); |
255 bool keyframe = false; | 322 bool keyframe = false; |
256 vpx_codec_iter_t iter = NULL; | 323 vpx_codec_iter_t iter = NULL; |
257 const vpx_codec_cx_pkt_t* pkt = NULL; | 324 const vpx_codec_cx_pkt_t* pkt = NULL; |
258 while ((pkt = vpx_codec_get_cx_data(encoder_.get(), &iter)) != NULL) { | 325 while ((pkt = vpx_codec_get_cx_data(encoder_.get(), &iter)) != NULL) { |
259 if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) | 326 if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) |
260 continue; | 327 continue; |
261 data->assign(static_cast<char*>(pkt->data.frame.buf), pkt->data.frame.sz); | 328 data->assign(static_cast<char*>(pkt->data.frame.buf), pkt->data.frame.sz); |
262 keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0; | 329 keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0; |
263 break; | 330 break; |
264 } | 331 } |
265 origin_task_runner_->PostTask(FROM_HERE, | 332 origin_task_runner_->PostTask(FROM_HERE, |
266 base::Bind(OnFrameEncodeCompleted, | 333 base::Bind(OnFrameEncodeCompleted, |
267 on_encoded_video_callback_, | 334 on_encoded_video_callback_, |
268 frame, | 335 frame, |
269 base::Passed(&data), | 336 base::Passed(&data), |
270 capture_timestamp, | 337 capture_timestamp, |
271 keyframe)); | 338 keyframe)); |
272 } | 339 } |
273 | 340 |
274 void VpxEncoder::ConfigureEncoding(const gfx::Size& size) { | 341 void VpxEncoder::ConfigureEncoderOnEncodingThread(const gfx::Size& size) { |
| 342 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); |
275 if (IsInitialized()) { | 343 if (IsInitialized()) { |
276 // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less- | 344 // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less- |
277 // than-or-equal than the old size, in terms of area, the existing encoder | 345 // than-or-equal than the old size, in terms of area, the existing encoder |
278 // instance could be reused after changing |codec_config_.{g_w,g_h}|. | 346 // instance could be reused after changing |codec_config_.{g_w,g_h}|. |
279 DVLOG(1) << "Destroying/Re-Creating encoder for new frame size: " | 347 DVLOG(1) << "Destroying/Re-Creating encoder for new frame size: " |
280 << gfx::Size(codec_config_.g_w, codec_config_.g_h).ToString() | 348 << gfx::Size(codec_config_.g_w, codec_config_.g_h).ToString() |
281 << " --> " << size.ToString() << (use_vp9_ ? " vp9" : " vp8"); | 349 << " --> " << size.ToString() << (use_vp9_ ? " vp9" : " vp8"); |
282 encoder_.reset(); | 350 encoder_.reset(); |
283 } | 351 } |
284 | 352 |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
329 // max distance out of necessity. | 397 // max distance out of necessity. |
330 // Note that due to http://crbug.com/440223, it might be necessary to force a | 398 // Note that due to http://crbug.com/440223, it might be necessary to force a |
331 // key frame after 10,000frames since decoding fails after 30,000 non-key | 399 // key frame after 10,000frames since decoding fails after 30,000 non-key |
332 // frames. | 400 // frames. |
333 // Forcing a keyframe in regular intervals also allows seeking in the | 401 // Forcing a keyframe in regular intervals also allows seeking in the |
334 // resulting recording with decent performance. | 402 // resulting recording with decent performance. |
335 codec_config_.kf_mode = VPX_KF_AUTO; | 403 codec_config_.kf_mode = VPX_KF_AUTO; |
336 codec_config_.kf_min_dist = 0; | 404 codec_config_.kf_min_dist = 0; |
337 codec_config_.kf_max_dist = 100; | 405 codec_config_.kf_max_dist = 100; |
338 | 406 |
339 // Do not saturate CPU utilization just for encoding. On a lower-end system | 407 codec_config_.g_threads = GetNumberOfThreadsForEncoding(); |
340 // with only 1 or 2 cores, use only one thread for encoding. On systems with | |
341 // more cores, allow half of the cores to be used for encoding. | |
342 codec_config_.g_threads = | |
343 std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2); | |
344 | 408 |
345 // Number of frames to consume before producing output. | 409 // Number of frames to consume before producing output. |
346 codec_config_.g_lag_in_frames = 0; | 410 codec_config_.g_lag_in_frames = 0; |
347 | 411 |
348 DCHECK(!encoder_); | 412 DCHECK(!encoder_); |
349 encoder_.reset(new vpx_codec_ctx_t); | 413 encoder_.reset(new vpx_codec_ctx_t); |
350 const vpx_codec_err_t ret = vpx_codec_enc_init(encoder_.get(), interface, | 414 const vpx_codec_err_t ret = vpx_codec_enc_init(encoder_.get(), interface, |
351 &codec_config_, kNoFlags); | 415 &codec_config_, 0 /* flags */); |
352 DCHECK_EQ(VPX_CODEC_OK, ret); | 416 DCHECK_EQ(VPX_CODEC_OK, ret); |
353 | 417 |
354 if (use_vp9_) { | 418 if (use_vp9_) { |
355 // Values of VP8E_SET_CPUUSED greater than 0 will increase encoder speed at | 419 // Values of VP8E_SET_CPUUSED greater than 0 will increase encoder speed at |
356 // the expense of quality up to a maximum value of 8 for VP9, by tuning the | 420 // the expense of quality up to a maximum value of 8 for VP9, by tuning the |
357 // target time spent encoding the frame. Go from 8 to 5 (values for real | 421 // target time spent encoding the frame. Go from 8 to 5 (values for real |
358 // time encoding) depending on the amount of cores available in the system. | 422 // time encoding) depending on the amount of cores available in the system. |
359 const int kCpuUsed = | 423 const int kCpuUsed = |
360 std::max(5, 8 - base::SysInfo::NumberOfProcessors() / 2); | 424 std::max(5, 8 - base::SysInfo::NumberOfProcessors() / 2); |
361 result = vpx_codec_control(encoder_.get(), VP8E_SET_CPUUSED, kCpuUsed); | 425 result = vpx_codec_control(encoder_.get(), VP8E_SET_CPUUSED, kCpuUsed); |
362 DLOG_IF(WARNING, VPX_CODEC_OK != result) << "VP8E_SET_CPUUSED failed"; | 426 DLOG_IF(WARNING, VPX_CODEC_OK != result) << "VP8E_SET_CPUUSED failed"; |
363 } | 427 } |
364 } | 428 } |
365 | 429 |
366 bool VpxEncoder::IsInitialized() const { | 430 bool VpxEncoder::IsInitialized() const { |
367 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); | 431 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); |
368 return codec_config_.g_timebase.den != 0; | 432 return codec_config_.g_timebase.den != 0; |
369 } | 433 } |
370 | 434 |
371 base::TimeDelta VpxEncoder::CalculateFrameDuration( | 435 base::TimeDelta VpxEncoder::EstimateFrameDuration( |
372 const scoped_refptr<VideoFrame>& frame) { | 436 const scoped_refptr<VideoFrame>& frame) { |
373 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); | 437 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); |
374 | 438 |
375 using base::TimeDelta; | 439 using base::TimeDelta; |
376 TimeDelta predicted_frame_duration; | 440 TimeDelta predicted_frame_duration; |
377 if (!frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION, | 441 if (!frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION, |
378 &predicted_frame_duration) || | 442 &predicted_frame_duration) || |
379 predicted_frame_duration <= TimeDelta()) { | 443 predicted_frame_duration <= TimeDelta()) { |
380 // The source of the video frame did not provide the frame duration. Use | 444 // The source of the video frame did not provide the frame duration. Use |
381 // the actual amount of time between the current and previous frame as a | 445 // the actual amount of time between the current and previous frame as a |
382 // prediction for the next frame's duration. | 446 // prediction for the next frame's duration. |
383 // TODO(mcasas): This duration estimation could lead to artifacts if the | 447 // TODO(mcasas): This duration estimation could lead to artifacts if the |
384 // cadence of the received stream is compromised (e.g. camera freeze, pause, | 448 // cadence of the received stream is compromised (e.g. camera freeze, pause, |
385 // remote packet loss). Investigate using GetFrameRate() in this case. | 449 // remote packet loss). Investigate using GetFrameRate() in this case. |
386 predicted_frame_duration = frame->timestamp() - last_frame_timestamp_; | 450 predicted_frame_duration = frame->timestamp() - last_frame_timestamp_; |
387 } | 451 } |
388 last_frame_timestamp_ = frame->timestamp(); | 452 last_frame_timestamp_ = frame->timestamp(); |
389 // Make sure |predicted_frame_duration| is in a safe range of values. | 453 // Make sure |predicted_frame_duration| is in a safe range of values. |
390 const TimeDelta kMaxFrameDuration = TimeDelta::FromSecondsD(1.0 / 8); | 454 const TimeDelta kMaxFrameDuration = TimeDelta::FromSecondsD(1.0 / 8); |
391 const TimeDelta kMinFrameDuration = TimeDelta::FromMilliseconds(1); | 455 const TimeDelta kMinFrameDuration = TimeDelta::FromMilliseconds(1); |
392 return std::min(kMaxFrameDuration, std::max(predicted_frame_duration, | 456 return std::min(kMaxFrameDuration, std::max(predicted_frame_duration, |
393 kMinFrameDuration)); | 457 kMinFrameDuration)); |
394 } | 458 } |
395 | 459 |
| 460 #if BUILDFLAG(RTC_USE_H264) |
| 461 |
| 462 // static |
| 463 void H264Encoder::ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread, |
| 464 ScopedISVCEncoderPtr encoder) { |
| 465 DCHECK(encoding_thread->IsRunning()); |
| 466 encoding_thread->Stop(); |
| 467 // Both |encoding_thread| and |encoder| will be destroyed at end-of-scope. |
| 468 } |
| 469 |
| 470 H264Encoder::H264Encoder( |
| 471 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, |
| 472 int32_t bits_per_second) |
| 473 : Encoder(on_encoded_video_callback, bits_per_second) { |
| 474 DCHECK(!encoding_thread_->IsRunning()); |
| 475 encoding_thread_->Start(); |
| 476 } |
| 477 |
| 478 H264Encoder::~H264Encoder() { |
| 479 main_task_runner_->PostTask(FROM_HERE, |
| 480 base::Bind(&H264Encoder::ShutdownEncoder, |
| 481 base::Passed(&encoding_thread_), |
| 482 base::Passed(&openh264_encoder_))); |
| 483 } |
| 484 |
| 485 void H264Encoder::EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame, |
| 486 base::TimeTicks capture_timestamp) { |
| 487 TRACE_EVENT0("video", "H264Encoder::EncodeOnEncodingThread"); |
| 488 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); |
| 489 |
| 490 const gfx::Size frame_size = frame->visible_rect().size(); |
| 491 if (!openh264_encoder_ || configured_size_ != frame_size) { |
| 492 ConfigureEncoderOnEncodingThread(frame_size); |
| 493 first_frame_timestamp_ = capture_timestamp; |
| 494 } |
| 495 |
| 496 SSourcePicture picture = {}; |
| 497 picture.iPicWidth = frame_size.width(); |
| 498 picture.iPicHeight = frame_size.height(); |
| 499 picture.iColorFormat = EVideoFormatType::videoFormatI420; |
| 500 picture.uiTimeStamp = |
| 501 (capture_timestamp - first_frame_timestamp_).InMilliseconds(); |
| 502 picture.iStride[0] = frame->stride(VideoFrame::kYPlane); |
| 503 picture.iStride[1] = frame->stride(VideoFrame::kUPlane); |
| 504 picture.iStride[2] = frame->stride(VideoFrame::kVPlane); |
| 505 picture.pData[0] = frame->visible_data(VideoFrame::kYPlane); |
| 506 picture.pData[1] = frame->visible_data(VideoFrame::kUPlane); |
| 507 picture.pData[2] = frame->visible_data(VideoFrame::kVPlane); |
| 508 |
| 509 SFrameBSInfo info = {}; |
| 510 if (openh264_encoder_->EncodeFrame(&picture, &info) != cmResultSuccess) { |
| 511 NOTREACHED() << "OpenH264 encoding failed"; |
| 512 return; |
| 513 } |
| 514 |
| 515 std::unique_ptr<std::string> data(new std::string); |
| 516 const uint8_t kNALStartCode[4] = {0, 0, 0, 1}; |
| 517 for (int layer = 0; layer < info.iLayerNum; ++layer) { |
| 518 const SLayerBSInfo& layerInfo = info.sLayerInfo[layer]; |
| 519 // Iterate NAL units making up this layer, noting fragments. |
| 520 size_t layer_len = 0; |
| 521 for (int nal = 0; nal < layerInfo.iNalCount; ++nal) { |
| 522 // The following DCHECKs make sure that the header of each NAL unit is OK. |
| 523 DCHECK_GE(layerInfo.pNalLengthInByte[nal], 4); |
| 524 DCHECK_EQ(kNALStartCode[0], layerInfo.pBsBuf[layer_len+0]); |
| 525 DCHECK_EQ(kNALStartCode[1], layerInfo.pBsBuf[layer_len+1]); |
| 526 DCHECK_EQ(kNALStartCode[2], layerInfo.pBsBuf[layer_len+2]); |
| 527 DCHECK_EQ(kNALStartCode[3], layerInfo.pBsBuf[layer_len+3]); |
| 528 |
| 529 layer_len += layerInfo.pNalLengthInByte[nal]; |
| 530 } |
| 531 // Copy the entire layer's data (including NAL start codes). |
| 532 data->append(reinterpret_cast<char*>(layerInfo.pBsBuf), layer_len); |
| 533 } |
| 534 |
| 535 const bool is_key_frame = info.eFrameType == videoFrameTypeIDR; |
| 536 origin_task_runner_->PostTask( |
| 537 FROM_HERE, |
| 538 base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_, frame, |
| 539 base::Passed(&data), capture_timestamp, is_key_frame)); |
| 540 } |
| 541 |
| 542 void H264Encoder::ConfigureEncoderOnEncodingThread(const gfx::Size& size) { |
| 543 DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread()); |
| 544 ISVCEncoder* temp_encoder = nullptr; |
| 545 if (WelsCreateSVCEncoder(&temp_encoder) != 0) { |
| 546 NOTREACHED() << "Failed to create OpenH264 encoder"; |
| 547 return; |
| 548 } |
| 549 openh264_encoder_.reset(temp_encoder); |
| 550 configured_size_ = size; |
| 551 |
| 552 #if DCHECK_IS_ON() |
| 553 int trace_level = WELS_LOG_INFO; |
| 554 openh264_encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level); |
| 555 #endif |
| 556 |
| 557 SEncParamExt init_params; |
| 558 openh264_encoder_->GetDefaultParams(&init_params); |
| 559 init_params.iUsageType = CAMERA_VIDEO_REAL_TIME; |
| 560 |
| 561 DCHECK_EQ(AUTO_REF_PIC_COUNT, init_params.iNumRefFrame); |
| 562 DCHECK(!init_params.bSimulcastAVC); |
| 563 |
| 564 init_params.uiIntraPeriod = 100; // Same as for VpxEncoder. |
| 565 init_params.iPicWidth = size.width(); |
| 566 init_params.iPicHeight = size.height(); |
| 567 |
| 568 DCHECK_EQ(RC_QUALITY_MODE, init_params.iRCMode); |
| 569 DCHECK_EQ(0, init_params.iPaddingFlag); |
| 570 DCHECK_EQ(UNSPECIFIED_BIT_RATE, init_params.iTargetBitrate); |
| 571 DCHECK_EQ(UNSPECIFIED_BIT_RATE, init_params.iMaxBitrate); |
| 572 if (bits_per_second_ > 0) { |
| 573 init_params.iRCMode = RC_BITRATE_MODE; |
| 574 init_params.iTargetBitrate = bits_per_second_; |
| 575 } else { |
| 576 init_params.iRCMode = RC_OFF_MODE; |
| 577 } |
| 578 |
| 579 // Threading model: Set to 1 due to https://crbug.com/583348. |
| 580 init_params.iMultipleThreadIdc = 1; |
| 581 |
| 582 // TODO(mcasas): consider reducing complexity if there are few CPUs available. |
| 583 DCHECK_EQ(MEDIUM_COMPLEXITY, init_params.iComplexityMode); |
| 584 DCHECK(!init_params.bEnableDenoise); |
| 585 DCHECK(init_params.bEnableFrameSkip); |
| 586 |
| 587 // The base spatial layer 0 is the only one we use. |
| 588 DCHECK_EQ(1, init_params.iSpatialLayerNum); |
| 589 init_params.sSpatialLayers[0].iVideoWidth = init_params.iPicWidth; |
| 590 init_params.sSpatialLayers[0].iVideoHeight = init_params.iPicHeight; |
| 591 init_params.sSpatialLayers[0].iSpatialBitrate = init_params.iTargetBitrate; |
| 592 // Slice num according to number of threads. |
| 593 init_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = SM_AUTO_SLICE; |
| 594 |
| 595 if (openh264_encoder_->InitializeExt(&init_params) != cmResultSuccess) { |
| 596 NOTREACHED() << "Failed to initialize OpenH264 encoder"; |
| 597 return; |
| 598 } |
| 599 |
| 600 int pixel_format = EVideoFormatType::videoFormatI420; |
| 601 openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, &pixel_format); |
| 602 } |
| 603 #endif //#if BUILDFLAG(RTC_USE_H264) |
| 604 |
396 } // anonymous namespace | 605 } // anonymous namespace |
397 | 606 |
398 VideoTrackRecorder::VideoTrackRecorder( | 607 VideoTrackRecorder::VideoTrackRecorder( |
399 CodecId codec, | 608 CodecId codec, |
400 const blink::WebMediaStreamTrack& track, | 609 const blink::WebMediaStreamTrack& track, |
401 const OnEncodedVideoCB& on_encoded_video_callback, | 610 const OnEncodedVideoCB& on_encoded_video_callback, |
402 int32_t bits_per_second) | 611 int32_t bits_per_second) |
403 : track_(track), | 612 : track_(track) { |
404 encoder_(new VpxEncoder(codec == CodecId::VP9, | |
405 on_encoded_video_callback, | |
406 bits_per_second)) { | |
407 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 613 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
408 DCHECK(!track_.isNull()); | 614 DCHECK(!track_.isNull()); |
409 DCHECK(track_.getExtraData()); | 615 DCHECK(track_.getExtraData()); |
410 | 616 |
| 617 switch (codec) { |
| 618 #if BUILDFLAG(RTC_USE_H264) |
| 619 case CodecId::H264: |
| 620 encoder_ = new H264Encoder(on_encoded_video_callback, bits_per_second); |
| 621 break; |
| 622 #endif |
| 623 case CodecId::VP8: |
| 624 case CodecId::VP9: |
| 625 encoder_ = new VpxEncoder(codec == CodecId::VP9, |
| 626 on_encoded_video_callback, bits_per_second); |
| 627 break; |
| 628 default: |
| 629 NOTREACHED() << "Unsupported codec"; |
| 630 } |
| 631 |
411 // StartFrameEncode() will be called on Render IO thread. | 632 // StartFrameEncode() will be called on Render IO thread. |
412 MediaStreamVideoSink::ConnectToTrack( | 633 MediaStreamVideoSink::ConnectToTrack( |
413 track_, | 634 track_, |
414 base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_)); | 635 base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_)); |
415 } | 636 } |
416 | 637 |
417 VideoTrackRecorder::~VideoTrackRecorder() { | 638 VideoTrackRecorder::~VideoTrackRecorder() { |
418 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 639 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
419 MediaStreamVideoSink::DisconnectFromTrack(); | 640 MediaStreamVideoSink::DisconnectFromTrack(); |
420 track_.reset(); | 641 track_.reset(); |
421 } | 642 } |
422 | 643 |
423 void VideoTrackRecorder::Pause() { | 644 void VideoTrackRecorder::Pause() { |
424 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 645 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
425 DCHECK(encoder_); | 646 DCHECK(encoder_); |
426 encoder_->set_paused(true); | 647 encoder_->SetPaused(true); |
427 } | 648 } |
428 | 649 |
429 void VideoTrackRecorder::Resume() { | 650 void VideoTrackRecorder::Resume() { |
430 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 651 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
431 DCHECK(encoder_); | 652 DCHECK(encoder_); |
432 encoder_->set_paused(false); | 653 encoder_->SetPaused(false); |
433 } | 654 } |
434 | 655 |
435 void VideoTrackRecorder::OnVideoFrameForTesting( | 656 void VideoTrackRecorder::OnVideoFrameForTesting( |
436 const scoped_refptr<media::VideoFrame>& frame, | 657 const scoped_refptr<media::VideoFrame>& frame, |
437 base::TimeTicks timestamp) { | 658 base::TimeTicks timestamp) { |
438 encoder_->StartFrameEncode(frame, timestamp); | 659 encoder_->StartFrameEncode(frame, timestamp); |
439 } | 660 } |
440 | 661 |
441 } // namespace content | 662 } // namespace content |
OLD | NEW |