Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(350)

Side by Side Diff: content/renderer/media_recorder/video_track_recorder.cc

Issue 2691373005: Support alpha channel recording for VPX in MediaRecorder (Closed)
Patch Set: Created 3 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media_recorder/video_track_recorder.h" 5 #include "content/renderer/media_recorder/video_track_recorder.h"
6 6
7 #include <utility> 7 #include <utility>
8 8
9 #include "base/bind.h" 9 #include "base/bind.h"
10 #include "base/logging.h" 10 #include "base/logging.h"
(...skipping 187 matching lines...) Expand 10 before | Expand all | Expand 10 after
198 bits_per_second_(bits_per_second) { 198 bits_per_second_(bits_per_second) {
199 DCHECK(!on_encoded_video_callback_.is_null()); 199 DCHECK(!on_encoded_video_callback_.is_null());
200 if (encoding_task_runner_) 200 if (encoding_task_runner_)
201 return; 201 return;
202 encoding_thread_.reset(new base::Thread("EncodingThread")); 202 encoding_thread_.reset(new base::Thread("EncodingThread"));
203 encoding_thread_->Start(); 203 encoding_thread_->Start();
204 encoding_task_runner_ = encoding_thread_->task_runner(); 204 encoding_task_runner_ = encoding_thread_->task_runner();
205 } 205 }
206 206
207 // Start encoding |frame|, returning via |on_encoded_video_callback_|. This 207 // Start encoding |frame|, returning via |on_encoded_video_callback_|. This
208 // call will also trigger a ConfigureEncoderOnEncodingTaskRunner() upon first 208 // call will also trigger an encode configuration upon first frame arrival
209 // frame arrival or parameter change, and an EncodeOnEncodingTaskRunner() to 209 // or parameter change, and an EncodeOnEncodingTaskRunner() to actually
210 // actually encode the frame. If the |frame|'s data is not directly available 210 // encode the frame. If the |frame|'s data is not directly available (e.g.
211 // (e.g. it's a texture) then RetrieveFrameOnMainThread() is called, and if 211 // it's a texture) then RetrieveFrameOnMainThread() is called, and if even
212 // even that fails, black frames are sent instead. 212 // that fails, black frames are sent instead.
213 void StartFrameEncode(const scoped_refptr<VideoFrame>& frame, 213 void StartFrameEncode(const scoped_refptr<VideoFrame>& frame,
214 base::TimeTicks capture_timestamp); 214 base::TimeTicks capture_timestamp);
215 void RetrieveFrameOnMainThread(const scoped_refptr<VideoFrame>& video_frame, 215 void RetrieveFrameOnMainThread(const scoped_refptr<VideoFrame>& video_frame,
216 base::TimeTicks capture_timestamp); 216 base::TimeTicks capture_timestamp);
217 217
218 void SetPaused(bool paused); 218 void SetPaused(bool paused);
219 virtual bool CanEncodeAlphaChannel() { return false; }
219 220
220 protected: 221 protected:
221 friend class base::RefCountedThreadSafe<Encoder>; 222 friend class base::RefCountedThreadSafe<Encoder>;
222 virtual ~Encoder() { 223 virtual ~Encoder() {
223 main_task_runner_->DeleteSoon(FROM_HERE, video_renderer_.release()); 224 main_task_runner_->DeleteSoon(FROM_HERE, video_renderer_.release());
224 } 225 }
225 226
226 virtual void EncodeOnEncodingTaskRunner( 227 virtual void EncodeOnEncodingTaskRunner(
227 scoped_refptr<VideoFrame> frame, 228 scoped_refptr<VideoFrame> frame,
228 base::TimeTicks capture_timestamp) = 0; 229 base::TimeTicks capture_timestamp) = 0;
229 virtual void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) = 0;
230 230
231 // Used to shutdown properly on the same thread we were created. 231 // Used to shutdown properly on the same thread we were created.
232 const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_; 232 const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
233 233
234 // Task runner where frames to encode and reply callbacks must happen. 234 // Task runner where frames to encode and reply callbacks must happen.
235 scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_; 235 scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_;
236 236
237 // Task runner where encoding interactions happen. 237 // Task runner where encoding interactions happen.
238 scoped_refptr<base::SingleThreadTaskRunner> encoding_task_runner_; 238 scoped_refptr<base::SingleThreadTaskRunner> encoding_task_runner_;
239 239
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
276 } 276 }
277 277
278 if (video_frame->HasTextures()) { 278 if (video_frame->HasTextures()) {
279 main_task_runner_->PostTask( 279 main_task_runner_->PostTask(
280 FROM_HERE, base::Bind(&Encoder::RetrieveFrameOnMainThread, this, 280 FROM_HERE, base::Bind(&Encoder::RetrieveFrameOnMainThread, this,
281 video_frame, capture_timestamp)); 281 video_frame, capture_timestamp));
282 return; 282 return;
283 } 283 }
284 284
285 scoped_refptr<media::VideoFrame> frame = video_frame; 285 scoped_refptr<media::VideoFrame> frame = video_frame;
286 // Drop alpha channel since we do not support it yet. 286 // Drop alpha channel if the encoder does not support it yet.
vignesh 2017/03/08 17:26:14 Supernit: accidental |.
emircan 2017/03/08 23:53:58 Done.
287 if (frame->format() == media::PIXEL_FORMAT_YV12A) 287 if (!CanEncodeAlphaChannel() && frame->format() == media::PIXEL_FORMAT_YV12A)
288 frame = media::WrapAsI420VideoFrame(video_frame); 288 frame = media::WrapAsI420VideoFrame(video_frame);
289 289
290 encoding_task_runner_->PostTask( 290 encoding_task_runner_->PostTask(
291 FROM_HERE, base::Bind(&Encoder::EncodeOnEncodingTaskRunner, this, frame, 291 FROM_HERE, base::Bind(&Encoder::EncodeOnEncodingTaskRunner, this, frame,
292 capture_timestamp)); 292 capture_timestamp));
293 } 293 }
294 294
295 void VideoTrackRecorder::Encoder::RetrieveFrameOnMainThread( 295 void VideoTrackRecorder::Encoder::RetrieveFrameOnMainThread(
296 const scoped_refptr<VideoFrame>& video_frame, 296 const scoped_refptr<VideoFrame>& video_frame,
297 base::TimeTicks capture_timestamp) { 297 base::TimeTicks capture_timestamp) {
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after
391 CHECK_EQ(ret, VPX_CODEC_OK); 391 CHECK_EQ(ret, VPX_CODEC_OK);
392 delete codec; 392 delete codec;
393 } 393 }
394 }; 394 };
395 typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr; 395 typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr;
396 396
397 static void OnFrameEncodeCompleted( 397 static void OnFrameEncodeCompleted(
398 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb, 398 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb,
399 const media::WebmMuxer::VideoParameters& params, 399 const media::WebmMuxer::VideoParameters& params,
400 std::unique_ptr<std::string> data, 400 std::unique_ptr<std::string> data,
401 std::unique_ptr<std::string> alpha_data,
401 base::TimeTicks capture_timestamp, 402 base::TimeTicks capture_timestamp,
402 bool keyframe) { 403 bool keyframe) {
403 DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, " 404 DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, "
404 << capture_timestamp << " ms"; 405 << capture_timestamp << " ms";
405 on_encoded_video_cb.Run(params, std::move(data), capture_timestamp, keyframe); 406 on_encoded_video_cb.Run(params, std::move(data), std::move(alpha_data),
407 capture_timestamp, keyframe);
406 } 408 }
407 409
408 static int GetNumberOfThreadsForEncoding() { 410 static int GetNumberOfThreadsForEncoding() {
409 // Do not saturate CPU utilization just for encoding. On a lower-end system 411 // Do not saturate CPU utilization just for encoding. On a lower-end system
410 // with only 1 or 2 cores, use only one thread for encoding. On systems with 412 // with only 1 or 2 cores, use only one thread for encoding. On systems with
411 // more cores, allow half of the cores to be used for encoding. 413 // more cores, allow half of the cores to be used for encoding.
412 return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2); 414 return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2);
413 } 415 }
414 416
415 // Class encapsulating VideoEncodeAccelerator interactions. 417 // Class encapsulating VideoEncodeAccelerator interactions.
(...skipping 24 matching lines...) Expand all
440 using VideoParamsAndTimestamp = 442 using VideoParamsAndTimestamp =
441 std::pair<media::WebmMuxer::VideoParameters, base::TimeTicks>; 443 std::pair<media::WebmMuxer::VideoParameters, base::TimeTicks>;
442 444
443 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id); 445 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id);
444 void FrameFinished(std::unique_ptr<base::SharedMemory> shm); 446 void FrameFinished(std::unique_ptr<base::SharedMemory> shm);
445 447
446 // VideoTrackRecorder::Encoder implementation. 448 // VideoTrackRecorder::Encoder implementation.
447 ~VEAEncoder() override; 449 ~VEAEncoder() override;
448 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame, 450 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
449 base::TimeTicks capture_timestamp) override; 451 base::TimeTicks capture_timestamp) override;
450 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override; 452
453 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size);
451 454
452 media::GpuVideoAcceleratorFactories* const gpu_factories_; 455 media::GpuVideoAcceleratorFactories* const gpu_factories_;
453 456
454 const media::VideoCodecProfile codec_; 457 const media::VideoCodecProfile codec_;
455 458
456 // The underlying VEA to perform encoding on. 459 // The underlying VEA to perform encoding on.
457 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_; 460 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_;
458 461
459 // Shared memory buffers for output with the VEA. 462 // Shared memory buffers for output with the VEA.
460 std::vector<std::unique_ptr<base::SharedMemory>> output_buffers_; 463 std::vector<std::unique_ptr<base::SharedMemory>> output_buffers_;
(...skipping 26 matching lines...) Expand all
487 VpxEncoder( 490 VpxEncoder(
488 bool use_vp9, 491 bool use_vp9,
489 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, 492 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
490 int32_t bits_per_second); 493 int32_t bits_per_second);
491 494
492 private: 495 private:
493 // VideoTrackRecorder::Encoder implementation. 496 // VideoTrackRecorder::Encoder implementation.
494 ~VpxEncoder() override; 497 ~VpxEncoder() override;
495 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame, 498 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
496 base::TimeTicks capture_timestamp) override; 499 base::TimeTicks capture_timestamp) override;
497 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override; 500 bool CanEncodeAlphaChannel() override { return true; }
498 501
499 // Returns true if |codec_config_| has been filled in at least once. 502 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size,
500 bool IsInitialized() const; 503 vpx_codec_enc_cfg_t* codec_config,
504 ScopedVpxCodecCtxPtr* encoder);
505 void DoEncode(vpx_codec_ctx_t* encoder,
506 const gfx::Size& frame_size,
507 uint8_t* const data,
508 uint8_t* const y_plane,
509 int y_stride,
510 uint8_t* const u_plane,
511 int u_stride,
512 uint8_t* const v_plane,
513 int v_stride,
514 const base::TimeDelta& duration,
515 bool force_keyframe,
516 std::string* output_data,
517 bool* keyframe);
518
519 // Returns true if |codec_config| has been filled in at least once.
520 bool IsInitialized(const vpx_codec_enc_cfg_t& codec_config) const;
501 521
502 // Estimate the frame duration from |frame| and |last_frame_timestamp_|. 522 // Estimate the frame duration from |frame| and |last_frame_timestamp_|.
503 base::TimeDelta EstimateFrameDuration(const scoped_refptr<VideoFrame>& frame); 523 base::TimeDelta EstimateFrameDuration(const scoped_refptr<VideoFrame>& frame);
504 524
505 // Force usage of VP9 for encoding, instead of VP8 which is the default. 525 // Force usage of VP9 for encoding, instead of VP8 which is the default.
506 const bool use_vp9_; 526 const bool use_vp9_;
507 527
508 // VPx internal objects: configuration and encoder. |encoder_| is a special 528 // VPx internal objects: configuration and encoder. |encoder_| is a special
509 // scoped pointer to guarantee proper destruction, particularly when 529 // scoped pointer to guarantee proper destruction, particularly when
510 // reconfiguring due to parameters change. Only used on |encoding_thread_|. 530 // reconfiguring due to parameters change. Only used on |encoding_thread_|.
511 vpx_codec_enc_cfg_t codec_config_; 531 vpx_codec_enc_cfg_t codec_config_;
512 ScopedVpxCodecCtxPtr encoder_; 532 ScopedVpxCodecCtxPtr encoder_;
513 533
534 vpx_codec_enc_cfg_t alpha_codec_config_;
535 ScopedVpxCodecCtxPtr alpha_encoder_;
536
537 std::vector<uint8_t> alpha_dummy_planes_;
538 size_t v_plane_offset_;
539 size_t u_plane_stride_;
540 size_t v_plane_stride_;
541 bool last_frame_alpha_ = false;
542
514 // The |VideoFrame::timestamp()| of the last encoded frame. This is used to 543 // The |VideoFrame::timestamp()| of the last encoded frame. This is used to
515 // predict the duration of the next frame. Only used on |encoding_thread_|. 544 // predict the duration of the next frame. Only used on |encoding_thread_|.
516 base::TimeDelta last_frame_timestamp_; 545 base::TimeDelta last_frame_timestamp_;
517 546
518 DISALLOW_COPY_AND_ASSIGN(VpxEncoder); 547 DISALLOW_COPY_AND_ASSIGN(VpxEncoder);
519 }; 548 };
520 549
521 #if BUILDFLAG(RTC_USE_H264) 550 #if BUILDFLAG(RTC_USE_H264)
522 551
523 struct ISVCEncoderDeleter { 552 struct ISVCEncoderDeleter {
(...skipping 15 matching lines...) Expand all
539 568
540 H264Encoder( 569 H264Encoder(
541 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, 570 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
542 int32_t bits_per_second); 571 int32_t bits_per_second);
543 572
544 private: 573 private:
545 // VideoTrackRecorder::Encoder implementation. 574 // VideoTrackRecorder::Encoder implementation.
546 ~H264Encoder() override; 575 ~H264Encoder() override;
547 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame, 576 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
548 base::TimeTicks capture_timestamp) override; 577 base::TimeTicks capture_timestamp) override;
549 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override; 578
579 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size);
550 580
551 // |openh264_encoder_| is a special scoped pointer to guarantee proper 581 // |openh264_encoder_| is a special scoped pointer to guarantee proper
552 // destruction, also when reconfiguring due to parameters change. Only used on 582 // destruction, also when reconfiguring due to parameters change. Only used on
553 // |encoding_thread_|. 583 // |encoding_thread_|.
554 gfx::Size configured_size_; 584 gfx::Size configured_size_;
555 ScopedISVCEncoderPtr openh264_encoder_; 585 ScopedISVCEncoderPtr openh264_encoder_;
556 586
557 // The |VideoFrame::timestamp()| of the first received frame. Only used on 587 // The |VideoFrame::timestamp()| of the first received frame. Only used on
558 // |encoding_thread_|. 588 // |encoding_thread_|.
559 base::TimeTicks first_frame_timestamp_; 589 base::TimeTicks first_frame_timestamp_;
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after
621 base::SharedMemory* output_buffer = 651 base::SharedMemory* output_buffer =
622 output_buffers_[bitstream_buffer_id].get(); 652 output_buffers_[bitstream_buffer_id].get();
623 653
624 std::unique_ptr<std::string> data(new std::string); 654 std::unique_ptr<std::string> data(new std::string);
625 data->append(reinterpret_cast<char*>(output_buffer->memory()), payload_size); 655 data->append(reinterpret_cast<char*>(output_buffer->memory()), payload_size);
626 656
627 const auto front_frame = frames_in_encode_.front(); 657 const auto front_frame = frames_in_encode_.front();
628 frames_in_encode_.pop(); 658 frames_in_encode_.pop();
629 origin_task_runner_->PostTask( 659 origin_task_runner_->PostTask(
630 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_, 660 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_,
631 front_frame.first, base::Passed(&data), 661 front_frame.first, base::Passed(&data), nullptr,
632 front_frame.second, keyframe)); 662 front_frame.second, keyframe));
633 UseOutputBitstreamBufferId(bitstream_buffer_id); 663 UseOutputBitstreamBufferId(bitstream_buffer_id);
634 } 664 }
635 665
636 void VEAEncoder::NotifyError(media::VideoEncodeAccelerator::Error error) { 666 void VEAEncoder::NotifyError(media::VideoEncodeAccelerator::Error error) {
637 DVLOG(3) << __func__; 667 DVLOG(3) << __func__;
638 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 668 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
639 669
640 // TODO(emircan): Notify the owner via a callback. 670 // TODO(emircan): Notify the owner via a callback.
641 error_notified_ = true; 671 error_notified_ = true;
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
790 base::Passed(&encoder_))); 820 base::Passed(&encoder_)));
791 } 821 }
792 822
793 void VpxEncoder::EncodeOnEncodingTaskRunner( 823 void VpxEncoder::EncodeOnEncodingTaskRunner(
794 scoped_refptr<VideoFrame> frame, 824 scoped_refptr<VideoFrame> frame,
795 base::TimeTicks capture_timestamp) { 825 base::TimeTicks capture_timestamp) {
796 TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingTaskRunner"); 826 TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingTaskRunner");
797 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 827 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
798 828
799 const gfx::Size frame_size = frame->visible_rect().size(); 829 const gfx::Size frame_size = frame->visible_rect().size();
800 if (!IsInitialized() || 830 const base::TimeDelta duration = EstimateFrameDuration(frame);
831 const media::WebmMuxer::VideoParameters video_params(frame);
832 bool force_keyframe = false;
833
834 if (!IsInitialized(codec_config_) ||
801 gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) { 835 gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) {
802 ConfigureEncoderOnEncodingTaskRunner(frame_size); 836 ConfigureEncoderOnEncodingTaskRunner(frame_size, &codec_config_, &encoder_);
803 } 837 }
838
839 if (frame->format() == media::PIXEL_FORMAT_YV12A) {
840 if (!IsInitialized(alpha_codec_config_) ||
841 gfx::Size(alpha_codec_config_.g_w, alpha_codec_config_.g_h) !=
842 frame_size) {
843 ConfigureEncoderOnEncodingTaskRunner(frame_size, &alpha_codec_config_,
844 &alpha_encoder_);
845 // It is more expensive to encode 0x00, so use 0x80 instead.
vignesh 2017/03/08 17:26:14 nit: Move this line just above the std::fill below
emircan 2017/03/08 23:53:58 Done.
846 u_plane_stride_ = media::VideoFrame::RowBytes(
847 VideoFrame::kUPlane, frame->format(), frame_size.width());
848 v_plane_stride_ = media::VideoFrame::RowBytes(
849 VideoFrame::kVPlane, frame->format(), frame_size.width());
850 v_plane_offset_ = media::VideoFrame::PlaneSize(
851 frame->format(), VideoFrame::kUPlane, frame_size)
852 .GetArea();
853 alpha_dummy_planes_.resize(
854 v_plane_offset_ + media::VideoFrame::PlaneSize(frame->format(),
855 VideoFrame::kVPlane,
856 frame_size)
857 .GetArea());
858 std::fill(alpha_dummy_planes_.begin(), alpha_dummy_planes_.end(), 128);
vignesh 2017/03/08 17:26:14 nit: can just use 0x80 here and avoid the mental c
emircan 2017/03/08 23:53:58 Done.
859 }
860 // If we introduced a new alpha frame, force keyframe on.
861 if (!last_frame_alpha_)
862 force_keyframe = true;
863 last_frame_alpha_ = true;
864 } else {
865 last_frame_alpha_ = false;
866 }
867
868 std::unique_ptr<std::string> data(new std::string);
869 bool keyframe = false;
870 DoEncode(encoder_.get(), frame_size, frame->data(VideoFrame::kYPlane),
871 frame->visible_data(VideoFrame::kYPlane),
872 frame->stride(VideoFrame::kYPlane),
873 frame->visible_data(VideoFrame::kUPlane),
874 frame->stride(VideoFrame::kUPlane),
875 frame->visible_data(VideoFrame::kVPlane),
876 frame->stride(VideoFrame::kVPlane), duration, force_keyframe,
877 data.get(), &keyframe);
878
879 std::unique_ptr<std::string> alpha_data(new std::string);
880 if (frame->format() == media::PIXEL_FORMAT_YV12A) {
881 if (!IsInitialized(alpha_codec_config_) ||
vignesh 2017/03/08 17:26:14 Why is this initialization block needed? The same
emircan 2017/03/08 23:53:58 Ouch, I missed it after a refactor. Removing this
882 gfx::Size(alpha_codec_config_.g_w, alpha_codec_config_.g_h) !=
883 frame_size) {
884 ConfigureEncoderOnEncodingTaskRunner(frame_size, &alpha_codec_config_,
885 &alpha_encoder_);
886 // It is more expensive to encode 0x00, so use 0x80 instead.
887 u_plane_stride_ = media::VideoFrame::RowBytes(
888 VideoFrame::kUPlane, frame->format(), frame_size.width());
889 v_plane_stride_ = media::VideoFrame::RowBytes(
890 VideoFrame::kVPlane, frame->format(), frame_size.width());
891 v_plane_offset_ = media::VideoFrame::PlaneSize(
892 frame->format(), VideoFrame::kUPlane, frame_size)
893 .GetArea();
894 alpha_dummy_planes_.resize(
895 v_plane_offset_ + media::VideoFrame::PlaneSize(frame->format(),
896 VideoFrame::kVPlane,
897 frame_size)
898 .GetArea());
899 std::fill(alpha_dummy_planes_.begin(), alpha_dummy_planes_.end(), 128);
900 }
901 bool alpha_keyframe = false;
902 DoEncode(alpha_encoder_.get(), frame_size, frame->data(VideoFrame::kAPlane),
903 frame->visible_data(VideoFrame::kAPlane),
904 frame->stride(VideoFrame::kAPlane), alpha_dummy_planes_.data(),
905 u_plane_stride_, alpha_dummy_planes_.data() + v_plane_offset_,
906 v_plane_stride_, duration, keyframe, alpha_data.get(),
907 &alpha_keyframe);
908 DCHECK_EQ(keyframe, alpha_keyframe);
909 }
910 frame = nullptr;
911
912 origin_task_runner_->PostTask(
913 FROM_HERE,
914 base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_,
915 video_params, base::Passed(&data), base::Passed(&alpha_data),
916 capture_timestamp, keyframe));
917 }
918
919 void VpxEncoder::DoEncode(vpx_codec_ctx_t* encoder,
vignesh 2017/03/08 17:26:14 nit: *const ?
emircan 2017/03/08 23:53:58 Done.
920 const gfx::Size& frame_size,
921 uint8_t* const data,
922 uint8_t* const y_plane,
923 int y_stride,
vignesh 2017/03/08 17:26:14 nit: const. ditto for other strides & |force_keyf
emircan 2017/03/08 23:53:58 AFAIK we dont use const for primitive function par
vignesh 2017/03/09 07:13:46 Acknowledged.
924 uint8_t* const u_plane,
925 int u_stride,
926 uint8_t* const v_plane,
927 int v_stride,
928 const base::TimeDelta& duration,
929 bool force_keyframe,
930 std::string* output_data,
931 bool* keyframe) {
vignesh 2017/03/08 17:26:14 nit: *const Ditto in the line above.
emircan 2017/03/08 23:53:58 Done.
932 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
804 933
805 vpx_image_t vpx_image; 934 vpx_image_t vpx_image;
806 vpx_image_t* const result = vpx_img_wrap(&vpx_image, 935 vpx_image_t* const result =
807 VPX_IMG_FMT_I420, 936 vpx_img_wrap(&vpx_image, VPX_IMG_FMT_I420, frame_size.width(),
808 frame_size.width(), 937 frame_size.height(), 1 /* align */, data);
809 frame_size.height(),
810 1 /* align */,
811 frame->data(VideoFrame::kYPlane));
812 DCHECK_EQ(result, &vpx_image); 938 DCHECK_EQ(result, &vpx_image);
813 vpx_image.planes[VPX_PLANE_Y] = frame->visible_data(VideoFrame::kYPlane); 939 vpx_image.planes[VPX_PLANE_Y] = y_plane;
814 vpx_image.planes[VPX_PLANE_U] = frame->visible_data(VideoFrame::kUPlane); 940 vpx_image.planes[VPX_PLANE_U] = u_plane;
815 vpx_image.planes[VPX_PLANE_V] = frame->visible_data(VideoFrame::kVPlane); 941 vpx_image.planes[VPX_PLANE_V] = v_plane;
816 vpx_image.stride[VPX_PLANE_Y] = frame->stride(VideoFrame::kYPlane); 942 vpx_image.stride[VPX_PLANE_Y] = y_stride;
817 vpx_image.stride[VPX_PLANE_U] = frame->stride(VideoFrame::kUPlane); 943 vpx_image.stride[VPX_PLANE_U] = u_stride;
818 vpx_image.stride[VPX_PLANE_V] = frame->stride(VideoFrame::kVPlane); 944 vpx_image.stride[VPX_PLANE_V] = v_stride;
819 945
820 const base::TimeDelta duration = EstimateFrameDuration(frame); 946 const vpx_codec_flags_t flags = force_keyframe ? VPX_EFLAG_FORCE_KF : 0;
821 // Encode the frame. The presentation time stamp argument here is fixed to 947 // Encode the frame. The presentation time stamp argument here is fixed to
822 // zero to force the encoder to base its single-frame bandwidth calculations 948 // zero to force the encoder to base its single-frame bandwidth calculations
823 // entirely on |predicted_frame_duration|. 949 // entirely on |predicted_frame_duration|.
824 const vpx_codec_err_t ret = vpx_codec_encode(encoder_.get(), 950 const vpx_codec_err_t ret =
825 &vpx_image, 951 vpx_codec_encode(encoder, &vpx_image, 0 /* pts */,
826 0 /* pts */, 952 duration.InMicroseconds(), flags, VPX_DL_REALTIME);
827 duration.InMicroseconds(), 953 DCHECK_EQ(ret, VPX_CODEC_OK)
828 0 /* flags */, 954 << vpx_codec_err_to_string(ret) << ", #" << vpx_codec_error(encoder)
829 VPX_DL_REALTIME); 955 << " -" << vpx_codec_error_detail(encoder);
830 DCHECK_EQ(ret, VPX_CODEC_OK) << vpx_codec_err_to_string(ret) << ", #" 956
831 << vpx_codec_error(encoder_.get()) << " -" 957 *keyframe = false;
832 << vpx_codec_error_detail(encoder_.get());
833
834 const media::WebmMuxer::VideoParameters video_params(frame);
835 frame = nullptr;
836
837 std::unique_ptr<std::string> data(new std::string);
838 bool keyframe = false;
839 vpx_codec_iter_t iter = NULL; 958 vpx_codec_iter_t iter = NULL;
840 const vpx_codec_cx_pkt_t* pkt = NULL; 959 const vpx_codec_cx_pkt_t* pkt = NULL;
841 while ((pkt = vpx_codec_get_cx_data(encoder_.get(), &iter)) != NULL) { 960 while ((pkt = vpx_codec_get_cx_data(encoder, &iter)) != NULL) {
842 if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) 961 if (pkt->kind != VPX_CODEC_CX_FRAME_PKT)
843 continue; 962 continue;
844 data->assign(static_cast<char*>(pkt->data.frame.buf), pkt->data.frame.sz); 963 output_data->assign(static_cast<char*>(pkt->data.frame.buf),
845 keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0; 964 pkt->data.frame.sz);
965 *keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
846 break; 966 break;
847 } 967 }
848 origin_task_runner_->PostTask(FROM_HERE,
849 base::Bind(OnFrameEncodeCompleted,
850 on_encoded_video_callback_,
851 video_params,
852 base::Passed(&data),
853 capture_timestamp,
854 keyframe));
855 } 968 }
856 969
857 void VpxEncoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) { 970 void VpxEncoder::ConfigureEncoderOnEncodingTaskRunner(
971 const gfx::Size& size,
972 vpx_codec_enc_cfg_t* codec_config,
973 ScopedVpxCodecCtxPtr* encoder) {
858 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 974 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
859 if (IsInitialized()) { 975 if (IsInitialized(*codec_config)) {
860 // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less- 976 // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less-
861 // than-or-equal than the old size, in terms of area, the existing encoder 977 // than-or-equal than the old size, in terms of area, the existing encoder
862 // instance could be reused after changing |codec_config_.{g_w,g_h}|. 978 // instance could be reused after changing |codec_config->{g_w,g_h}|.
863 DVLOG(1) << "Destroying/Re-Creating encoder for new frame size: " 979 DVLOG(1) << "Destroying/Re-Creating encoder for new frame size: "
864 << gfx::Size(codec_config_.g_w, codec_config_.g_h).ToString() 980 << gfx::Size(codec_config->g_w, codec_config->g_h).ToString()
865 << " --> " << size.ToString() << (use_vp9_ ? " vp9" : " vp8"); 981 << " --> " << size.ToString() << (use_vp9_ ? " vp9" : " vp8");
866 encoder_.reset(); 982 encoder->reset();
867 } 983 }
868 984
869 const vpx_codec_iface_t* codec_interface = 985 const vpx_codec_iface_t* codec_interface =
870 use_vp9_ ? vpx_codec_vp9_cx() : vpx_codec_vp8_cx(); 986 use_vp9_ ? vpx_codec_vp9_cx() : vpx_codec_vp8_cx();
871 vpx_codec_err_t result = vpx_codec_enc_config_default( 987 vpx_codec_err_t result = vpx_codec_enc_config_default(
872 codec_interface, &codec_config_, 0 /* reserved */); 988 codec_interface, codec_config, 0 /* reserved */);
873 DCHECK_EQ(VPX_CODEC_OK, result); 989 DCHECK_EQ(VPX_CODEC_OK, result);
874 990
875 DCHECK_EQ(320u, codec_config_.g_w); 991 DCHECK_EQ(320u, codec_config->g_w);
876 DCHECK_EQ(240u, codec_config_.g_h); 992 DCHECK_EQ(240u, codec_config->g_h);
877 DCHECK_EQ(256u, codec_config_.rc_target_bitrate); 993 DCHECK_EQ(256u, codec_config->rc_target_bitrate);
878 // Use the selected bitrate or adjust default bit rate to account for the 994 // Use the selected bitrate or adjust default bit rate to account for the
879 // actual size. Note: |rc_target_bitrate| units are kbit per second. 995 // actual size. Note: |rc_target_bitrate| units are kbit per second.
880 if (bits_per_second_ > 0) { 996 if (bits_per_second_ > 0) {
881 codec_config_.rc_target_bitrate = bits_per_second_ / 1000; 997 codec_config->rc_target_bitrate = bits_per_second_ / 1000;
882 } else { 998 } else {
883 codec_config_.rc_target_bitrate = size.GetArea() * 999 codec_config->rc_target_bitrate = size.GetArea() *
884 codec_config_.rc_target_bitrate / 1000 codec_config->rc_target_bitrate /
885 codec_config_.g_w / codec_config_.g_h; 1001 codec_config->g_w / codec_config->g_h;
886 } 1002 }
887 // Both VP8/VP9 configuration should be Variable BitRate by default. 1003 // Both VP8/VP9 configuration should be Variable BitRate by default.
888 DCHECK_EQ(VPX_VBR, codec_config_.rc_end_usage); 1004 DCHECK_EQ(VPX_VBR, codec_config->rc_end_usage);
889 if (use_vp9_) { 1005 if (use_vp9_) {
890 // Number of frames to consume before producing output. 1006 // Number of frames to consume before producing output.
891 codec_config_.g_lag_in_frames = 0; 1007 codec_config->g_lag_in_frames = 0;
892 1008
893 // DCHECK that the profile selected by default is I420 (magic number 0). 1009 // DCHECK that the profile selected by default is I420 (magic number 0).
894 DCHECK_EQ(0u, codec_config_.g_profile); 1010 DCHECK_EQ(0u, codec_config->g_profile);
895 } else { 1011 } else {
896 // VP8 always produces frames instantaneously. 1012 // VP8 always produces frames instantaneously.
897 DCHECK_EQ(0u, codec_config_.g_lag_in_frames); 1013 DCHECK_EQ(0u, codec_config->g_lag_in_frames);
898 } 1014 }
899 1015
900 DCHECK(size.width()); 1016 DCHECK(size.width());
901 DCHECK(size.height()); 1017 DCHECK(size.height());
902 codec_config_.g_w = size.width(); 1018 codec_config->g_w = size.width();
903 codec_config_.g_h = size.height(); 1019 codec_config->g_h = size.height();
904 codec_config_.g_pass = VPX_RC_ONE_PASS; 1020 codec_config->g_pass = VPX_RC_ONE_PASS;
905 1021
906 // Timebase is the smallest interval used by the stream, can be set to the 1022 // Timebase is the smallest interval used by the stream, can be set to the
907 // frame rate or to e.g. microseconds. 1023 // frame rate or to e.g. microseconds.
908 codec_config_.g_timebase.num = 1; 1024 codec_config->g_timebase.num = 1;
909 codec_config_.g_timebase.den = base::Time::kMicrosecondsPerSecond; 1025 codec_config->g_timebase.den = base::Time::kMicrosecondsPerSecond;
910 1026
911 // Let the encoder decide where to place the Keyframes, between min and max. 1027 // Let the encoder decide where to place the Keyframes, between min and max.
912 // In VPX_KF_AUTO mode libvpx will sometimes emit keyframes regardless of min/ 1028 // In VPX_KF_AUTO mode libvpx will sometimes emit keyframes regardless of min/
913 // max distance out of necessity. 1029 // max distance out of necessity.
914 // Note that due to http://crbug.com/440223, it might be necessary to force a 1030 // Note that due to http://crbug.com/440223, it might be necessary to force a
915 // key frame after 10,000 frames since decoding fails after 30,000 non-key 1031 // key frame after 10,000 frames since decoding fails after 30,000 non-key
916 // frames. 1032 // frames.
917 // Forcing a keyframe in regular intervals also allows seeking in the 1033 // Forcing a keyframe in regular intervals also allows seeking in the
918 // resulting recording with decent performance. 1034 // resulting recording with decent performance.
919 codec_config_.kf_mode = VPX_KF_AUTO; 1035 codec_config->kf_mode = VPX_KF_AUTO;
920 codec_config_.kf_min_dist = 0; 1036 codec_config->kf_min_dist = 0;
921 codec_config_.kf_max_dist = 100; 1037 codec_config->kf_max_dist = 100;
922 1038
923 codec_config_.g_threads = GetNumberOfThreadsForEncoding(); 1039 codec_config->g_threads = GetNumberOfThreadsForEncoding();
924 1040
925 // Number of frames to consume before producing output. 1041 // Number of frames to consume before producing output.
926 codec_config_.g_lag_in_frames = 0; 1042 codec_config->g_lag_in_frames = 0;
927 1043
928 DCHECK(!encoder_); 1044 encoder->reset(new vpx_codec_ctx_t);
929 encoder_.reset(new vpx_codec_ctx_t);
930 const vpx_codec_err_t ret = vpx_codec_enc_init( 1045 const vpx_codec_err_t ret = vpx_codec_enc_init(
931 encoder_.get(), codec_interface, &codec_config_, 0 /* flags */); 1046 encoder->get(), codec_interface, codec_config, 0 /* flags */);
932 DCHECK_EQ(VPX_CODEC_OK, ret); 1047 DCHECK_EQ(VPX_CODEC_OK, ret);
933 1048
934 if (use_vp9_) { 1049 if (use_vp9_) {
935 // Values of VP8E_SET_CPUUSED greater than 0 will increase encoder speed at 1050 // Values of VP8E_SET_CPUUSED greater than 0 will increase encoder speed at
936 // the expense of quality up to a maximum value of 8 for VP9, by tuning the 1051 // the expense of quality up to a maximum value of 8 for VP9, by tuning the
937 // target time spent encoding the frame. Go from 8 to 5 (values for real 1052 // target time spent encoding the frame. Go from 8 to 5 (values for real
938 // time encoding) depending on the amount of cores available in the system. 1053 // time encoding) depending on the amount of cores available in the system.
939 const int kCpuUsed = 1054 const int kCpuUsed =
940 std::max(5, 8 - base::SysInfo::NumberOfProcessors() / 2); 1055 std::max(5, 8 - base::SysInfo::NumberOfProcessors() / 2);
941 result = vpx_codec_control(encoder_.get(), VP8E_SET_CPUUSED, kCpuUsed); 1056 result = vpx_codec_control(encoder->get(), VP8E_SET_CPUUSED, kCpuUsed);
942 DLOG_IF(WARNING, VPX_CODEC_OK != result) << "VP8E_SET_CPUUSED failed"; 1057 DLOG_IF(WARNING, VPX_CODEC_OK != result) << "VP8E_SET_CPUUSED failed";
943 } 1058 }
944 } 1059 }
945 1060
946 bool VpxEncoder::IsInitialized() const { 1061 bool VpxEncoder::IsInitialized(const vpx_codec_enc_cfg_t& codec_config) const {
947 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 1062 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
948 return codec_config_.g_timebase.den != 0; 1063 return codec_config.g_timebase.den != 0;
949 } 1064 }
950 1065
951 base::TimeDelta VpxEncoder::EstimateFrameDuration( 1066 base::TimeDelta VpxEncoder::EstimateFrameDuration(
952 const scoped_refptr<VideoFrame>& frame) { 1067 const scoped_refptr<VideoFrame>& frame) {
953 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 1068 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
954 1069
955 using base::TimeDelta; 1070 using base::TimeDelta;
956 TimeDelta predicted_frame_duration; 1071 TimeDelta predicted_frame_duration;
957 if (!frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION, 1072 if (!frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
958 &predicted_frame_duration) || 1073 &predicted_frame_duration) ||
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after
1046 1161
1047 layer_len += layerInfo.pNalLengthInByte[nal]; 1162 layer_len += layerInfo.pNalLengthInByte[nal];
1048 } 1163 }
1049 // Copy the entire layer's data (including NAL start codes). 1164 // Copy the entire layer's data (including NAL start codes).
1050 data->append(reinterpret_cast<char*>(layerInfo.pBsBuf), layer_len); 1165 data->append(reinterpret_cast<char*>(layerInfo.pBsBuf), layer_len);
1051 } 1166 }
1052 1167
1053 const bool is_key_frame = info.eFrameType == videoFrameTypeIDR; 1168 const bool is_key_frame = info.eFrameType == videoFrameTypeIDR;
1054 origin_task_runner_->PostTask( 1169 origin_task_runner_->PostTask(
1055 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_, 1170 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_,
1056 video_params, base::Passed(&data), 1171 video_params, base::Passed(&data), nullptr,
1057 capture_timestamp, is_key_frame)); 1172 capture_timestamp, is_key_frame));
1058 } 1173 }
1059 1174
1060 void H264Encoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) { 1175 void H264Encoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) {
1061 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 1176 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
1062 ISVCEncoder* temp_encoder = nullptr; 1177 ISVCEncoder* temp_encoder = nullptr;
1063 if (WelsCreateSVCEncoder(&temp_encoder) != 0) { 1178 if (WelsCreateSVCEncoder(&temp_encoder) != 0) {
1064 NOTREACHED() << "Failed to create OpenH264 encoder"; 1179 NOTREACHED() << "Failed to create OpenH264 encoder";
1065 return; 1180 return;
1066 } 1181 }
(...skipping 162 matching lines...) Expand 10 before | Expand all | Expand 10 after
1229 if (paused_before_init_) 1344 if (paused_before_init_)
1230 encoder_->SetPaused(paused_before_init_); 1345 encoder_->SetPaused(paused_before_init_);
1231 1346
1232 // StartFrameEncode() will be called on Render IO thread. 1347 // StartFrameEncode() will be called on Render IO thread.
1233 MediaStreamVideoSink::ConnectToTrack( 1348 MediaStreamVideoSink::ConnectToTrack(
1234 track_, 1349 track_,
1235 base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_), 1350 base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_),
1236 false); 1351 false);
1237 } 1352 }
1238 1353
1354 bool VideoTrackRecorder::CanEncodeAlphaChannelForTesting() {
1355 DCHECK(encoder_);
1356 return encoder_->CanEncodeAlphaChannel();
1357 }
1358
1239 } // namespace content 1359 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698