Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1)

Side by Side Diff: content/renderer/media_recorder/video_track_recorder.cc

Issue 2691373005: Support alpha channel recording for VPX in MediaRecorder (Closed)
Patch Set: Modify webm_muxer_unittest. Created 3 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media_recorder/video_track_recorder.h" 5 #include "content/renderer/media_recorder/video_track_recorder.h"
6 6
7 #include <utility> 7 #include <utility>
8 8
9 #include "base/bind.h" 9 #include "base/bind.h"
10 #include "base/logging.h" 10 #include "base/logging.h"
(...skipping 187 matching lines...) Expand 10 before | Expand all | Expand 10 after
198 bits_per_second_(bits_per_second) { 198 bits_per_second_(bits_per_second) {
199 DCHECK(!on_encoded_video_callback_.is_null()); 199 DCHECK(!on_encoded_video_callback_.is_null());
200 if (encoding_task_runner_) 200 if (encoding_task_runner_)
201 return; 201 return;
202 encoding_thread_.reset(new base::Thread("EncodingThread")); 202 encoding_thread_.reset(new base::Thread("EncodingThread"));
203 encoding_thread_->Start(); 203 encoding_thread_->Start();
204 encoding_task_runner_ = encoding_thread_->task_runner(); 204 encoding_task_runner_ = encoding_thread_->task_runner();
205 } 205 }
206 206
207 // Start encoding |frame|, returning via |on_encoded_video_callback_|. This 207 // Start encoding |frame|, returning via |on_encoded_video_callback_|. This
208 // call will also trigger a ConfigureEncoderOnEncodingTaskRunner() upon first 208 // call will also trigger an encode configuration upon first frame arrival
209 // frame arrival or parameter change, and an EncodeOnEncodingTaskRunner() to 209 // or parameter change, and an EncodeOnEncodingTaskRunner() to actually
210 // actually encode the frame. If the |frame|'s data is not directly available 210 // encode the frame. If the |frame|'s data is not directly available (e.g.
211 // (e.g. it's a texture) then RetrieveFrameOnMainThread() is called, and if 211 // it's a texture) then RetrieveFrameOnMainThread() is called, and if even
212 // even that fails, black frames are sent instead. 212 // that fails, black frames are sent instead.
213 void StartFrameEncode(const scoped_refptr<VideoFrame>& frame, 213 void StartFrameEncode(const scoped_refptr<VideoFrame>& frame,
214 base::TimeTicks capture_timestamp); 214 base::TimeTicks capture_timestamp);
215 void RetrieveFrameOnMainThread(const scoped_refptr<VideoFrame>& video_frame, 215 void RetrieveFrameOnMainThread(const scoped_refptr<VideoFrame>& video_frame,
216 base::TimeTicks capture_timestamp); 216 base::TimeTicks capture_timestamp);
217 217
218 void SetPaused(bool paused); 218 void SetPaused(bool paused);
219 virtual bool CanEncodeAlphaChannel() { return false; }
219 220
220 protected: 221 protected:
221 friend class base::RefCountedThreadSafe<Encoder>; 222 friend class base::RefCountedThreadSafe<Encoder>;
222 virtual ~Encoder() { 223 virtual ~Encoder() {
223 main_task_runner_->DeleteSoon(FROM_HERE, video_renderer_.release()); 224 main_task_runner_->DeleteSoon(FROM_HERE, video_renderer_.release());
224 } 225 }
225 226
226 virtual void EncodeOnEncodingTaskRunner( 227 virtual void EncodeOnEncodingTaskRunner(
227 scoped_refptr<VideoFrame> frame, 228 scoped_refptr<VideoFrame> frame,
228 base::TimeTicks capture_timestamp) = 0; 229 base::TimeTicks capture_timestamp) = 0;
229 virtual void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) = 0;
230 230
231 // Used to shutdown properly on the same thread we were created. 231 // Used to shutdown properly on the same thread we were created.
232 const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_; 232 const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
233 233
234 // Task runner where frames to encode and reply callbacks must happen. 234 // Task runner where frames to encode and reply callbacks must happen.
235 scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_; 235 scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_;
236 236
237 // Task runner where encoding interactions happen. 237 // Task runner where encoding interactions happen.
238 scoped_refptr<base::SingleThreadTaskRunner> encoding_task_runner_; 238 scoped_refptr<base::SingleThreadTaskRunner> encoding_task_runner_;
239 239
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
276 } 276 }
277 277
278 if (video_frame->HasTextures()) { 278 if (video_frame->HasTextures()) {
279 main_task_runner_->PostTask( 279 main_task_runner_->PostTask(
280 FROM_HERE, base::Bind(&Encoder::RetrieveFrameOnMainThread, this, 280 FROM_HERE, base::Bind(&Encoder::RetrieveFrameOnMainThread, this,
281 video_frame, capture_timestamp)); 281 video_frame, capture_timestamp));
282 return; 282 return;
283 } 283 }
284 284
285 scoped_refptr<media::VideoFrame> frame = video_frame; 285 scoped_refptr<media::VideoFrame> frame = video_frame;
286 // Drop alpha channel since we do not support it yet. 286 // Drop alpha channel if the encoder does not support it yet.
287 if (frame->format() == media::PIXEL_FORMAT_YV12A) 287 if (!CanEncodeAlphaChannel() && frame->format() == media::PIXEL_FORMAT_YV12A)
288 frame = media::WrapAsI420VideoFrame(video_frame); 288 frame = media::WrapAsI420VideoFrame(video_frame);
289 289
290 encoding_task_runner_->PostTask( 290 encoding_task_runner_->PostTask(
291 FROM_HERE, base::Bind(&Encoder::EncodeOnEncodingTaskRunner, this, frame, 291 FROM_HERE, base::Bind(&Encoder::EncodeOnEncodingTaskRunner, this, frame,
292 capture_timestamp)); 292 capture_timestamp));
293 } 293 }
294 294
295 void VideoTrackRecorder::Encoder::RetrieveFrameOnMainThread( 295 void VideoTrackRecorder::Encoder::RetrieveFrameOnMainThread(
296 const scoped_refptr<VideoFrame>& video_frame, 296 const scoped_refptr<VideoFrame>& video_frame,
297 base::TimeTicks capture_timestamp) { 297 base::TimeTicks capture_timestamp) {
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after
391 CHECK_EQ(ret, VPX_CODEC_OK); 391 CHECK_EQ(ret, VPX_CODEC_OK);
392 delete codec; 392 delete codec;
393 } 393 }
394 }; 394 };
395 typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr; 395 typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr;
396 396
397 static void OnFrameEncodeCompleted( 397 static void OnFrameEncodeCompleted(
398 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb, 398 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb,
399 const media::WebmMuxer::VideoParameters& params, 399 const media::WebmMuxer::VideoParameters& params,
400 std::unique_ptr<std::string> data, 400 std::unique_ptr<std::string> data,
401 std::unique_ptr<std::string> alpha_data,
401 base::TimeTicks capture_timestamp, 402 base::TimeTicks capture_timestamp,
402 bool keyframe) { 403 bool keyframe) {
403 DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, " 404 DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, "
404 << capture_timestamp << " ms"; 405 << capture_timestamp << " ms";
405 on_encoded_video_cb.Run(params, std::move(data), capture_timestamp, keyframe); 406 on_encoded_video_cb.Run(params, std::move(data), std::move(alpha_data),
407 capture_timestamp, keyframe);
406 } 408 }
407 409
408 static int GetNumberOfThreadsForEncoding() { 410 static int GetNumberOfThreadsForEncoding() {
409 // Do not saturate CPU utilization just for encoding. On a lower-end system 411 // Do not saturate CPU utilization just for encoding. On a lower-end system
410 // with only 1 or 2 cores, use only one thread for encoding. On systems with 412 // with only 1 or 2 cores, use only one thread for encoding. On systems with
411 // more cores, allow half of the cores to be used for encoding. 413 // more cores, allow half of the cores to be used for encoding.
412 return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2); 414 return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2);
413 } 415 }
414 416
415 // Class encapsulating VideoEncodeAccelerator interactions. 417 // Class encapsulating VideoEncodeAccelerator interactions.
(...skipping 24 matching lines...) Expand all
440 using VideoParamsAndTimestamp = 442 using VideoParamsAndTimestamp =
441 std::pair<media::WebmMuxer::VideoParameters, base::TimeTicks>; 443 std::pair<media::WebmMuxer::VideoParameters, base::TimeTicks>;
442 444
443 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id); 445 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id);
444 void FrameFinished(std::unique_ptr<base::SharedMemory> shm); 446 void FrameFinished(std::unique_ptr<base::SharedMemory> shm);
445 447
446 // VideoTrackRecorder::Encoder implementation. 448 // VideoTrackRecorder::Encoder implementation.
447 ~VEAEncoder() override; 449 ~VEAEncoder() override;
448 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame, 450 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
449 base::TimeTicks capture_timestamp) override; 451 base::TimeTicks capture_timestamp) override;
450 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override; 452
453 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size);
451 454
452 media::GpuVideoAcceleratorFactories* const gpu_factories_; 455 media::GpuVideoAcceleratorFactories* const gpu_factories_;
453 456
454 const media::VideoCodecProfile codec_; 457 const media::VideoCodecProfile codec_;
455 458
456 // The underlying VEA to perform encoding on. 459 // The underlying VEA to perform encoding on.
457 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_; 460 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_;
458 461
459 // Shared memory buffers for output with the VEA. 462 // Shared memory buffers for output with the VEA.
460 std::vector<std::unique_ptr<base::SharedMemory>> output_buffers_; 463 std::vector<std::unique_ptr<base::SharedMemory>> output_buffers_;
(...skipping 26 matching lines...) Expand all
487 VpxEncoder( 490 VpxEncoder(
488 bool use_vp9, 491 bool use_vp9,
489 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, 492 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
490 int32_t bits_per_second); 493 int32_t bits_per_second);
491 494
492 private: 495 private:
493 // VideoTrackRecorder::Encoder implementation. 496 // VideoTrackRecorder::Encoder implementation.
494 ~VpxEncoder() override; 497 ~VpxEncoder() override;
495 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame, 498 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
496 base::TimeTicks capture_timestamp) override; 499 base::TimeTicks capture_timestamp) override;
497 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override; 500 bool CanEncodeAlphaChannel() override { return true; }
498 501
499 // Returns true if |codec_config_| has been filled in at least once. 502 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size,
500 bool IsInitialized() const; 503 vpx_codec_enc_cfg_t* codec_config,
504 ScopedVpxCodecCtxPtr* encoder);
505 void DoEncode(vpx_codec_ctx_t* const encoder,
506 const gfx::Size& frame_size,
507 uint8_t* const data,
508 uint8_t* const y_plane,
509 int y_stride,
510 uint8_t* const u_plane,
511 int u_stride,
512 uint8_t* const v_plane,
513 int v_stride,
514 const base::TimeDelta& duration,
515 bool force_keyframe,
516 std::string* const output_data,
517 bool* const keyframe);
518
519 // Returns true if |codec_config| has been filled in at least once.
520 bool IsInitialized(const vpx_codec_enc_cfg_t& codec_config) const;
501 521
502 // Estimate the frame duration from |frame| and |last_frame_timestamp_|. 522 // Estimate the frame duration from |frame| and |last_frame_timestamp_|.
503 base::TimeDelta EstimateFrameDuration(const scoped_refptr<VideoFrame>& frame); 523 base::TimeDelta EstimateFrameDuration(const scoped_refptr<VideoFrame>& frame);
504 524
505 // Force usage of VP9 for encoding, instead of VP8 which is the default. 525 // Force usage of VP9 for encoding, instead of VP8 which is the default.
506 const bool use_vp9_; 526 const bool use_vp9_;
507 527
508 // VPx internal objects: configuration and encoder. |encoder_| is a special 528 // VPx internal objects: configuration and encoder. |encoder_| is a special
509 // scoped pointer to guarantee proper destruction, particularly when 529 // scoped pointer to guarantee proper destruction, particularly when
510 // reconfiguring due to parameters change. Only used on |encoding_thread_|. 530 // reconfiguring due to parameters change. Only used on |encoding_thread_|.
511 vpx_codec_enc_cfg_t codec_config_; 531 vpx_codec_enc_cfg_t codec_config_;
512 ScopedVpxCodecCtxPtr encoder_; 532 ScopedVpxCodecCtxPtr encoder_;
513 533
534 vpx_codec_enc_cfg_t alpha_codec_config_;
535 ScopedVpxCodecCtxPtr alpha_encoder_;
536
537 std::vector<uint8_t> alpha_dummy_planes_;
538 size_t v_plane_offset_;
539 size_t u_plane_stride_;
540 size_t v_plane_stride_;
541 bool last_frame_alpha_ = false;
542
514 // The |VideoFrame::timestamp()| of the last encoded frame. This is used to 543 // The |VideoFrame::timestamp()| of the last encoded frame. This is used to
515 // predict the duration of the next frame. Only used on |encoding_thread_|. 544 // predict the duration of the next frame. Only used on |encoding_thread_|.
516 base::TimeDelta last_frame_timestamp_; 545 base::TimeDelta last_frame_timestamp_;
517 546
518 DISALLOW_COPY_AND_ASSIGN(VpxEncoder); 547 DISALLOW_COPY_AND_ASSIGN(VpxEncoder);
519 }; 548 };
520 549
521 #if BUILDFLAG(RTC_USE_H264) 550 #if BUILDFLAG(RTC_USE_H264)
522 551
523 struct ISVCEncoderDeleter { 552 struct ISVCEncoderDeleter {
(...skipping 15 matching lines...) Expand all
539 568
540 H264Encoder( 569 H264Encoder(
541 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, 570 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
542 int32_t bits_per_second); 571 int32_t bits_per_second);
543 572
544 private: 573 private:
545 // VideoTrackRecorder::Encoder implementation. 574 // VideoTrackRecorder::Encoder implementation.
546 ~H264Encoder() override; 575 ~H264Encoder() override;
547 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame, 576 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
548 base::TimeTicks capture_timestamp) override; 577 base::TimeTicks capture_timestamp) override;
549 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override; 578
579 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size);
550 580
551 // |openh264_encoder_| is a special scoped pointer to guarantee proper 581 // |openh264_encoder_| is a special scoped pointer to guarantee proper
552 // destruction, also when reconfiguring due to parameters change. Only used on 582 // destruction, also when reconfiguring due to parameters change. Only used on
553 // |encoding_thread_|. 583 // |encoding_thread_|.
554 gfx::Size configured_size_; 584 gfx::Size configured_size_;
555 ScopedISVCEncoderPtr openh264_encoder_; 585 ScopedISVCEncoderPtr openh264_encoder_;
556 586
557 // The |VideoFrame::timestamp()| of the first received frame. Only used on 587 // The |VideoFrame::timestamp()| of the first received frame. Only used on
558 // |encoding_thread_|. 588 // |encoding_thread_|.
559 base::TimeTicks first_frame_timestamp_; 589 base::TimeTicks first_frame_timestamp_;
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after
621 base::SharedMemory* output_buffer = 651 base::SharedMemory* output_buffer =
622 output_buffers_[bitstream_buffer_id].get(); 652 output_buffers_[bitstream_buffer_id].get();
623 653
624 std::unique_ptr<std::string> data(new std::string); 654 std::unique_ptr<std::string> data(new std::string);
625 data->append(reinterpret_cast<char*>(output_buffer->memory()), payload_size); 655 data->append(reinterpret_cast<char*>(output_buffer->memory()), payload_size);
626 656
627 const auto front_frame = frames_in_encode_.front(); 657 const auto front_frame = frames_in_encode_.front();
628 frames_in_encode_.pop(); 658 frames_in_encode_.pop();
629 origin_task_runner_->PostTask( 659 origin_task_runner_->PostTask(
630 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_, 660 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_,
631 front_frame.first, base::Passed(&data), 661 front_frame.first, base::Passed(&data), nullptr,
632 front_frame.second, keyframe)); 662 front_frame.second, keyframe));
633 UseOutputBitstreamBufferId(bitstream_buffer_id); 663 UseOutputBitstreamBufferId(bitstream_buffer_id);
634 } 664 }
635 665
636 void VEAEncoder::NotifyError(media::VideoEncodeAccelerator::Error error) { 666 void VEAEncoder::NotifyError(media::VideoEncodeAccelerator::Error error) {
637 DVLOG(3) << __func__; 667 DVLOG(3) << __func__;
638 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 668 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
639 669
640 // TODO(emircan): Notify the owner via a callback. 670 // TODO(emircan): Notify the owner via a callback.
641 error_notified_ = true; 671 error_notified_ = true;
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
790 base::Passed(&encoder_))); 820 base::Passed(&encoder_)));
791 } 821 }
792 822
793 void VpxEncoder::EncodeOnEncodingTaskRunner( 823 void VpxEncoder::EncodeOnEncodingTaskRunner(
794 scoped_refptr<VideoFrame> frame, 824 scoped_refptr<VideoFrame> frame,
795 base::TimeTicks capture_timestamp) { 825 base::TimeTicks capture_timestamp) {
796 TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingTaskRunner"); 826 TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingTaskRunner");
797 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 827 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
798 828
799 const gfx::Size frame_size = frame->visible_rect().size(); 829 const gfx::Size frame_size = frame->visible_rect().size();
800 if (!IsInitialized() || 830 const base::TimeDelta duration = EstimateFrameDuration(frame);
831 const media::WebmMuxer::VideoParameters video_params(frame);
832 bool force_keyframe = false;
833
834 if (!IsInitialized(codec_config_) ||
801 gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) { 835 gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) {
802 ConfigureEncoderOnEncodingTaskRunner(frame_size); 836 ConfigureEncoderOnEncodingTaskRunner(frame_size, &codec_config_, &encoder_);
803 } 837 }
804 838
839 if (frame->format() == media::PIXEL_FORMAT_YV12A) {
mcasas 2017/03/11 01:21:22 nit: Maybe do a const bool frame_has_alpha = fram
emircan 2017/03/13 05:43:53 Done.
840 if (!IsInitialized(alpha_codec_config_) ||
841 gfx::Size(alpha_codec_config_.g_w, alpha_codec_config_.g_h) !=
842 frame_size) {
843 ConfigureEncoderOnEncodingTaskRunner(frame_size, &alpha_codec_config_,
844 &alpha_encoder_);
845 u_plane_stride_ = media::VideoFrame::RowBytes(
846 VideoFrame::kUPlane, frame->format(), frame_size.width());
847 v_plane_stride_ = media::VideoFrame::RowBytes(
848 VideoFrame::kVPlane, frame->format(), frame_size.width());
849 v_plane_offset_ = media::VideoFrame::PlaneSize(
850 frame->format(), VideoFrame::kUPlane, frame_size)
851 .GetArea();
852 alpha_dummy_planes_.resize(
853 v_plane_offset_ + media::VideoFrame::PlaneSize(frame->format(),
854 VideoFrame::kVPlane,
855 frame_size)
856 .GetArea());
857 // It is more expensive to encode 0x00, so use 0x80 instead.
858 std::fill(alpha_dummy_planes_.begin(), alpha_dummy_planes_.end(), 0x80);
859 }
860 // If we introduced a new alpha frame, force keyframe.
861 if (!last_frame_alpha_)
862 force_keyframe = true;
863 last_frame_alpha_ = true;
864 } else {
865 last_frame_alpha_ = false;
866 }
mcasas 2017/03/11 01:25:39 Somehow rietveld threw away my comment. I wanted
emircan 2017/03/13 05:43:53 Done.
867
868 std::unique_ptr<std::string> data(new std::string);
869 bool keyframe = false;
870 DoEncode(encoder_.get(), frame_size, frame->data(VideoFrame::kYPlane),
871 frame->visible_data(VideoFrame::kYPlane),
872 frame->stride(VideoFrame::kYPlane),
873 frame->visible_data(VideoFrame::kUPlane),
874 frame->stride(VideoFrame::kUPlane),
875 frame->visible_data(VideoFrame::kVPlane),
876 frame->stride(VideoFrame::kVPlane), duration, force_keyframe,
877 data.get(), &keyframe);
878
879 std::unique_ptr<std::string> alpha_data(new std::string);
880 if (frame->format() == media::PIXEL_FORMAT_YV12A) {
881 bool alpha_keyframe = false;
882 DoEncode(alpha_encoder_.get(), frame_size, frame->data(VideoFrame::kAPlane),
883 frame->visible_data(VideoFrame::kAPlane),
884 frame->stride(VideoFrame::kAPlane), alpha_dummy_planes_.data(),
885 u_plane_stride_, alpha_dummy_planes_.data() + v_plane_offset_,
886 v_plane_stride_, duration, keyframe, alpha_data.get(),
887 &alpha_keyframe);
888 DCHECK_EQ(keyframe, alpha_keyframe);
889 }
890 frame = nullptr;
891
892 origin_task_runner_->PostTask(
893 FROM_HERE,
894 base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_,
895 video_params, base::Passed(&data), base::Passed(&alpha_data),
896 capture_timestamp, keyframe));
897 }
898
// Runs one synchronous VPx encode of a single I420 image through |encoder|.
// |data| is the base pointer handed to vpx_img_wrap(); the per-plane
// pointers/strides are then overwritten explicitly so the same helper serves
// both the color encode (real Y/U/V planes) and the alpha encode (alpha plane
// as Y plus dummy U/V planes). On return, |output_data| holds the compressed
// frame (if a frame packet was produced) and |*keyframe| says whether it was
// a key frame. Must run on the encoding thread.
void VpxEncoder::DoEncode(vpx_codec_ctx_t* const encoder,
                          const gfx::Size& frame_size,
                          uint8_t* const data,
                          uint8_t* const y_plane,
                          int y_stride,
                          uint8_t* const u_plane,
                          int u_stride,
                          uint8_t* const v_plane,
                          int v_stride,
                          const base::TimeDelta& duration,
                          bool force_keyframe,
                          std::string* const output_data,
                          bool* const keyframe) {
  DCHECK(encoding_task_runner_->BelongsToCurrentThread());

  // Wrap the existing pixel data without copying; vpx_img_wrap() returns its
  // first argument on success.
  vpx_image_t vpx_image;
  vpx_image_t* const result =
      vpx_img_wrap(&vpx_image, VPX_IMG_FMT_I420, frame_size.width(),
                   frame_size.height(), 1 /* align */, data);
  DCHECK_EQ(result, &vpx_image);
  // Point each plane at the caller-supplied buffers (these may differ from
  // what vpx_img_wrap() derived from |data|, e.g. visible-rect offsets or the
  // dummy chroma planes used for alpha encoding).
  vpx_image.planes[VPX_PLANE_Y] = y_plane;
  vpx_image.planes[VPX_PLANE_U] = u_plane;
  vpx_image.planes[VPX_PLANE_V] = v_plane;
  vpx_image.stride[VPX_PLANE_Y] = y_stride;
  vpx_image.stride[VPX_PLANE_U] = u_stride;
  vpx_image.stride[VPX_PLANE_V] = v_stride;

  const vpx_codec_flags_t flags = force_keyframe ? VPX_EFLAG_FORCE_KF : 0;
  // Encode the frame. The presentation time stamp argument here is fixed to
  // zero to force the encoder to base its single-frame bandwidth calculations
  // entirely on |predicted_frame_duration|.
  const vpx_codec_err_t ret =
      vpx_codec_encode(encoder, &vpx_image, 0 /* pts */,
                       duration.InMicroseconds(), flags, VPX_DL_REALTIME);
  DCHECK_EQ(ret, VPX_CODEC_OK)
      << vpx_codec_err_to_string(ret) << ", #" << vpx_codec_error(encoder)
      << " -" << vpx_codec_error_detail(encoder);

  *keyframe = false;
  // Drain the output packet iterator; only the first frame packet is used
  // (g_lag_in_frames is 0, so at most one is expected per encode call).
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t* pkt = NULL;
  while ((pkt = vpx_codec_get_cx_data(encoder, &iter)) != NULL) {
    if (pkt->kind != VPX_CODEC_CX_FRAME_PKT)
      continue;
    output_data->assign(static_cast<char*>(pkt->data.frame.buf),
                        pkt->data.frame.sz);
    *keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
    break;
  }
}
856 949
// (Re)creates and configures a VPx encoder instance for frames of |size|,
// writing the configuration into |codec_config| and the codec context into
// |encoder|. Called for both the color and the alpha encoder instances.
// Destroys any previously-initialized encoder first (frame-size change).
// Must run on the encoding thread.
void VpxEncoder::ConfigureEncoderOnEncodingTaskRunner(
    const gfx::Size& size,
    vpx_codec_enc_cfg_t* codec_config,
    ScopedVpxCodecCtxPtr* encoder) {
  DCHECK(encoding_task_runner_->BelongsToCurrentThread());
  if (IsInitialized(*codec_config)) {
    // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less-
    // than-or-equal than the old size, in terms of area, the existing encoder
    // instance could be reused after changing |codec_config->{g_w,g_h}|.
    DVLOG(1) << "Destroying/Re-Creating encoder for new frame size: "
             << gfx::Size(codec_config->g_w, codec_config->g_h).ToString()
             << " --> " << size.ToString() << (use_vp9_ ? " vp9" : " vp8");
    encoder->reset();
  }

  const vpx_codec_iface_t* codec_interface =
      use_vp9_ ? vpx_codec_vp9_cx() : vpx_codec_vp8_cx();
  vpx_codec_err_t result = vpx_codec_enc_config_default(
      codec_interface, codec_config, 0 /* reserved */);
  DCHECK_EQ(VPX_CODEC_OK, result);

  // Sanity-check the libvpx defaults this code assumes before overriding.
  DCHECK_EQ(320u, codec_config->g_w);
  DCHECK_EQ(240u, codec_config->g_h);
  DCHECK_EQ(256u, codec_config->rc_target_bitrate);
  // Use the selected bitrate or adjust default bit rate to account for the
  // actual size. Note: |rc_target_bitrate| units are kbit per second.
  if (bits_per_second_ > 0) {
    codec_config->rc_target_bitrate = bits_per_second_ / 1000;
  } else {
    codec_config->rc_target_bitrate = size.GetArea() *
                                      codec_config->rc_target_bitrate /
                                      codec_config->g_w / codec_config->g_h;
  }
  // Both VP8/VP9 configuration should be Variable BitRate by default.
  DCHECK_EQ(VPX_VBR, codec_config->rc_end_usage);
  if (use_vp9_) {
    // Number of frames to consume before producing output.
    codec_config->g_lag_in_frames = 0;

    // DCHECK that the profile selected by default is I420 (magic number 0).
    DCHECK_EQ(0u, codec_config->g_profile);
  } else {
    // VP8 always produces frames instantaneously.
    DCHECK_EQ(0u, codec_config->g_lag_in_frames);
  }

  DCHECK(size.width());
  DCHECK(size.height());
  codec_config->g_w = size.width();
  codec_config->g_h = size.height();
  codec_config->g_pass = VPX_RC_ONE_PASS;

  // Timebase is the smallest interval used by the stream, can be set to the
  // frame rate or to e.g. microseconds.
  codec_config->g_timebase.num = 1;
  codec_config->g_timebase.den = base::Time::kMicrosecondsPerSecond;

  // Let the encoder decide where to place the Keyframes, between min and max.
  // In VPX_KF_AUTO mode libvpx will sometimes emit keyframes regardless of min/
  // max distance out of necessity.
  // Note that due to http://crbug.com/440223, it might be necessary to force a
  // key frame after 10,000 frames since decoding fails after 30,000 non-key
  // frames.
  // Forcing a keyframe in regular intervals also allows seeking in the
  // resulting recording with decent performance.
  codec_config->kf_mode = VPX_KF_AUTO;
  codec_config->kf_min_dist = 0;
  codec_config->kf_max_dist = 100;

  codec_config->g_threads = GetNumberOfThreadsForEncoding();

  // Number of frames to consume before producing output.
  codec_config->g_lag_in_frames = 0;

  encoder->reset(new vpx_codec_ctx_t);
  const vpx_codec_err_t ret = vpx_codec_enc_init(
      encoder->get(), codec_interface, codec_config, 0 /* flags */);
  DCHECK_EQ(VPX_CODEC_OK, ret);

  if (use_vp9_) {
    // Values of VP8E_SET_CPUUSED greater than 0 will increase encoder speed at
    // the expense of quality up to a maximum value of 8 for VP9, by tuning the
    // target time spent encoding the frame. Go from 8 to 5 (values for real
    // time encoding) depending on the amount of cores available in the system.
    const int kCpuUsed =
        std::max(5, 8 - base::SysInfo::NumberOfProcessors() / 2);
    result = vpx_codec_control(encoder->get(), VP8E_SET_CPUUSED, kCpuUsed);
    DLOG_IF(WARNING, VPX_CODEC_OK != result) << "VP8E_SET_CPUUSED failed";
  }
}
945 1040
946 bool VpxEncoder::IsInitialized() const { 1041 bool VpxEncoder::IsInitialized(const vpx_codec_enc_cfg_t& codec_config) const {
947 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 1042 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
948 return codec_config_.g_timebase.den != 0; 1043 return codec_config.g_timebase.den != 0;
949 } 1044 }
950 1045
951 base::TimeDelta VpxEncoder::EstimateFrameDuration( 1046 base::TimeDelta VpxEncoder::EstimateFrameDuration(
952 const scoped_refptr<VideoFrame>& frame) { 1047 const scoped_refptr<VideoFrame>& frame) {
953 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 1048 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
954 1049
955 using base::TimeDelta; 1050 using base::TimeDelta;
956 TimeDelta predicted_frame_duration; 1051 TimeDelta predicted_frame_duration;
957 if (!frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION, 1052 if (!frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
958 &predicted_frame_duration) || 1053 &predicted_frame_duration) ||
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after
1046 1141
1047 layer_len += layerInfo.pNalLengthInByte[nal]; 1142 layer_len += layerInfo.pNalLengthInByte[nal];
1048 } 1143 }
1049 // Copy the entire layer's data (including NAL start codes). 1144 // Copy the entire layer's data (including NAL start codes).
1050 data->append(reinterpret_cast<char*>(layerInfo.pBsBuf), layer_len); 1145 data->append(reinterpret_cast<char*>(layerInfo.pBsBuf), layer_len);
1051 } 1146 }
1052 1147
1053 const bool is_key_frame = info.eFrameType == videoFrameTypeIDR; 1148 const bool is_key_frame = info.eFrameType == videoFrameTypeIDR;
1054 origin_task_runner_->PostTask( 1149 origin_task_runner_->PostTask(
1055 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_, 1150 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_,
1056 video_params, base::Passed(&data), 1151 video_params, base::Passed(&data), nullptr,
1057 capture_timestamp, is_key_frame)); 1152 capture_timestamp, is_key_frame));
1058 } 1153 }
1059 1154
1060 void H264Encoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) { 1155 void H264Encoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) {
1061 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 1156 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
1062 ISVCEncoder* temp_encoder = nullptr; 1157 ISVCEncoder* temp_encoder = nullptr;
1063 if (WelsCreateSVCEncoder(&temp_encoder) != 0) { 1158 if (WelsCreateSVCEncoder(&temp_encoder) != 0) {
1064 NOTREACHED() << "Failed to create OpenH264 encoder"; 1159 NOTREACHED() << "Failed to create OpenH264 encoder";
1065 return; 1160 return;
1066 } 1161 }
(...skipping 162 matching lines...) Expand 10 before | Expand all | Expand 10 after
1229 if (paused_before_init_) 1324 if (paused_before_init_)
1230 encoder_->SetPaused(paused_before_init_); 1325 encoder_->SetPaused(paused_before_init_);
1231 1326
1232 // StartFrameEncode() will be called on Render IO thread. 1327 // StartFrameEncode() will be called on Render IO thread.
1233 MediaStreamVideoSink::ConnectToTrack( 1328 MediaStreamVideoSink::ConnectToTrack(
1234 track_, 1329 track_,
1235 base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_), 1330 base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_),
1236 false); 1331 false);
1237 } 1332 }
1238 1333
1334 bool VideoTrackRecorder::CanEncodeAlphaChannelForTesting() {
1335 DCHECK(encoder_);
1336 return encoder_->CanEncodeAlphaChannel();
1337 }
1338
1239 } // namespace content 1339 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698