Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(107)

Side by Side Diff: content/renderer/media_recorder/video_track_recorder.cc

Issue 2691373005: Support alpha channel recording for VPX in MediaRecorder (Closed)
Patch Set: Created 3 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media_recorder/video_track_recorder.h" 5 #include "content/renderer/media_recorder/video_track_recorder.h"
6 6
7 #include <utility> 7 #include <utility>
8 8
9 #include "base/bind.h" 9 #include "base/bind.h"
10 #include "base/logging.h" 10 #include "base/logging.h"
(...skipping 187 matching lines...) Expand 10 before | Expand all | Expand 10 after
198 bits_per_second_(bits_per_second) { 198 bits_per_second_(bits_per_second) {
199 DCHECK(!on_encoded_video_callback_.is_null()); 199 DCHECK(!on_encoded_video_callback_.is_null());
200 if (encoding_task_runner_) 200 if (encoding_task_runner_)
201 return; 201 return;
202 encoding_thread_.reset(new base::Thread("EncodingThread")); 202 encoding_thread_.reset(new base::Thread("EncodingThread"));
203 encoding_thread_->Start(); 203 encoding_thread_->Start();
204 encoding_task_runner_ = encoding_thread_->task_runner(); 204 encoding_task_runner_ = encoding_thread_->task_runner();
205 } 205 }
206 206
207 // Start encoding |frame|, returning via |on_encoded_video_callback_|. This 207 // Start encoding |frame|, returning via |on_encoded_video_callback_|. This
208 // call will also trigger a ConfigureEncoderOnEncodingTaskRunner() upon first 208 // call will also trigger an encode configuration upon first frame arrival
209 // frame arrival or parameter change, and an EncodeOnEncodingTaskRunner() to 209 // or parameter change, and an EncodeOnEncodingTaskRunner() to actually
210 // actually encode the frame. If the |frame|'s data is not directly available 210 // encode the frame. If the |frame|'s data is not directly available (e.g.
211 // (e.g. it's a texture) then RetrieveFrameOnMainThread() is called, and if 211 // it's a texture) then RetrieveFrameOnMainThread() is called, and if even
212 // even that fails, black frames are sent instead. 212 // that fails, black frames are sent instead.
213 void StartFrameEncode(const scoped_refptr<VideoFrame>& frame, 213 void StartFrameEncode(const scoped_refptr<VideoFrame>& frame,
214 base::TimeTicks capture_timestamp); 214 base::TimeTicks capture_timestamp);
215 void RetrieveFrameOnMainThread(const scoped_refptr<VideoFrame>& video_frame, 215 void RetrieveFrameOnMainThread(const scoped_refptr<VideoFrame>& video_frame,
216 base::TimeTicks capture_timestamp); 216 base::TimeTicks capture_timestamp);
217 217
218 void SetPaused(bool paused); 218 void SetPaused(bool paused);
219 virtual bool CanEncodeAlphaChannel() { return false; }
219 220
220 protected: 221 protected:
221 friend class base::RefCountedThreadSafe<Encoder>; 222 friend class base::RefCountedThreadSafe<Encoder>;
222 virtual ~Encoder() { 223 virtual ~Encoder() {
223 main_task_runner_->DeleteSoon(FROM_HERE, video_renderer_.release()); 224 main_task_runner_->DeleteSoon(FROM_HERE, video_renderer_.release());
224 } 225 }
225 226
226 virtual void EncodeOnEncodingTaskRunner( 227 virtual void EncodeOnEncodingTaskRunner(
227 scoped_refptr<VideoFrame> frame, 228 scoped_refptr<VideoFrame> frame,
228 base::TimeTicks capture_timestamp) = 0; 229 base::TimeTicks capture_timestamp) = 0;
229 virtual void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) = 0;
230 230
 231 // Used to shut down properly on the same thread we were created. 231 // Used to shut down properly on the same thread we were created.
232 const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_; 232 const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
233 233
234 // Task runner where frames to encode and reply callbacks must happen. 234 // Task runner where frames to encode and reply callbacks must happen.
235 scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_; 235 scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_;
236 236
237 // Task runner where encoding interactions happen. 237 // Task runner where encoding interactions happen.
238 scoped_refptr<base::SingleThreadTaskRunner> encoding_task_runner_; 238 scoped_refptr<base::SingleThreadTaskRunner> encoding_task_runner_;
239 239
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
276 } 276 }
277 277
278 if (video_frame->HasTextures()) { 278 if (video_frame->HasTextures()) {
279 main_task_runner_->PostTask( 279 main_task_runner_->PostTask(
280 FROM_HERE, base::Bind(&Encoder::RetrieveFrameOnMainThread, this, 280 FROM_HERE, base::Bind(&Encoder::RetrieveFrameOnMainThread, this,
281 video_frame, capture_timestamp)); 281 video_frame, capture_timestamp));
282 return; 282 return;
283 } 283 }
284 284
285 scoped_refptr<media::VideoFrame> frame = video_frame; 285 scoped_refptr<media::VideoFrame> frame = video_frame;
286 // Drop alpha channel since we do not support it yet. 286 // Drop alpha channel if the encoder does not support it yet.
287 if (frame->format() == media::PIXEL_FORMAT_YV12A) 287 if (!CanEncodeAlphaChannel() && frame->format() == media::PIXEL_FORMAT_YV12A)
288 frame = media::WrapAsI420VideoFrame(video_frame); 288 frame = media::WrapAsI420VideoFrame(video_frame);
289 289
290 encoding_task_runner_->PostTask( 290 encoding_task_runner_->PostTask(
291 FROM_HERE, base::Bind(&Encoder::EncodeOnEncodingTaskRunner, this, frame, 291 FROM_HERE, base::Bind(&Encoder::EncodeOnEncodingTaskRunner, this, frame,
292 capture_timestamp)); 292 capture_timestamp));
293 } 293 }
294 294
295 void VideoTrackRecorder::Encoder::RetrieveFrameOnMainThread( 295 void VideoTrackRecorder::Encoder::RetrieveFrameOnMainThread(
296 const scoped_refptr<VideoFrame>& video_frame, 296 const scoped_refptr<VideoFrame>& video_frame,
297 base::TimeTicks capture_timestamp) { 297 base::TimeTicks capture_timestamp) {
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after
391 CHECK_EQ(ret, VPX_CODEC_OK); 391 CHECK_EQ(ret, VPX_CODEC_OK);
392 delete codec; 392 delete codec;
393 } 393 }
394 }; 394 };
395 typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr; 395 typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr;
396 396
397 static void OnFrameEncodeCompleted( 397 static void OnFrameEncodeCompleted(
398 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb, 398 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb,
399 const media::WebmMuxer::VideoParameters& params, 399 const media::WebmMuxer::VideoParameters& params,
400 std::unique_ptr<std::string> data, 400 std::unique_ptr<std::string> data,
401 std::unique_ptr<std::string> alpha_data,
401 base::TimeTicks capture_timestamp, 402 base::TimeTicks capture_timestamp,
402 bool keyframe) { 403 bool keyframe) {
403 DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, " 404 DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, "
404 << capture_timestamp << " ms"; 405 << capture_timestamp << " ms";
405 on_encoded_video_cb.Run(params, std::move(data), capture_timestamp, keyframe); 406 on_encoded_video_cb.Run(params, std::move(data), std::move(alpha_data),
407 capture_timestamp, keyframe);
406 } 408 }
407 409
408 static int GetNumberOfThreadsForEncoding() { 410 static int GetNumberOfThreadsForEncoding() {
409 // Do not saturate CPU utilization just for encoding. On a lower-end system 411 // Do not saturate CPU utilization just for encoding. On a lower-end system
410 // with only 1 or 2 cores, use only one thread for encoding. On systems with 412 // with only 1 or 2 cores, use only one thread for encoding. On systems with
411 // more cores, allow half of the cores to be used for encoding. 413 // more cores, allow half of the cores to be used for encoding.
412 return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2); 414 return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2);
413 } 415 }
414 416
415 // Class encapsulating VideoEncodeAccelerator interactions. 417 // Class encapsulating VideoEncodeAccelerator interactions.
(...skipping 24 matching lines...) Expand all
440 using VideoParamsAndTimestamp = 442 using VideoParamsAndTimestamp =
441 std::pair<media::WebmMuxer::VideoParameters, base::TimeTicks>; 443 std::pair<media::WebmMuxer::VideoParameters, base::TimeTicks>;
442 444
443 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id); 445 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id);
444 void FrameFinished(std::unique_ptr<base::SharedMemory> shm); 446 void FrameFinished(std::unique_ptr<base::SharedMemory> shm);
445 447
446 // VideoTrackRecorder::Encoder implementation. 448 // VideoTrackRecorder::Encoder implementation.
447 ~VEAEncoder() override; 449 ~VEAEncoder() override;
448 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame, 450 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
449 base::TimeTicks capture_timestamp) override; 451 base::TimeTicks capture_timestamp) override;
450 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override; 452
453 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size);
451 454
452 media::GpuVideoAcceleratorFactories* const gpu_factories_; 455 media::GpuVideoAcceleratorFactories* const gpu_factories_;
453 456
454 const media::VideoCodecProfile codec_; 457 const media::VideoCodecProfile codec_;
455 458
456 // The underlying VEA to perform encoding on. 459 // The underlying VEA to perform encoding on.
457 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_; 460 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_;
458 461
459 // Shared memory buffers for output with the VEA. 462 // Shared memory buffers for output with the VEA.
460 std::vector<std::unique_ptr<base::SharedMemory>> output_buffers_; 463 std::vector<std::unique_ptr<base::SharedMemory>> output_buffers_;
(...skipping 26 matching lines...) Expand all
487 VpxEncoder( 490 VpxEncoder(
488 bool use_vp9, 491 bool use_vp9,
489 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, 492 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
490 int32_t bits_per_second); 493 int32_t bits_per_second);
491 494
492 private: 495 private:
493 // VideoTrackRecorder::Encoder implementation. 496 // VideoTrackRecorder::Encoder implementation.
494 ~VpxEncoder() override; 497 ~VpxEncoder() override;
495 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame, 498 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
496 base::TimeTicks capture_timestamp) override; 499 base::TimeTicks capture_timestamp) override;
497 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override; 500 bool CanEncodeAlphaChannel() override { return true; }
498 501
499 // Returns true if |codec_config_| has been filled in at least once. 502 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size,
500 bool IsInitialized() const; 503 vpx_codec_enc_cfg_t* codec_config,
504 ScopedVpxCodecCtxPtr* encoder);
505 void DoEncode(vpx_codec_ctx_t* const encoder,
506 const gfx::Size& frame_size,
507 uint8_t* const data,
508 uint8_t* const y_plane,
509 int y_stride,
510 uint8_t* const u_plane,
511 int u_stride,
512 uint8_t* const v_plane,
513 int v_stride,
514 const base::TimeDelta& duration,
515 bool force_keyframe,
516 std::string* const output_data,
517 bool* const keyframe);
518
519 // Returns true if |codec_config| has been filled in at least once.
520 bool IsInitialized(const vpx_codec_enc_cfg_t& codec_config) const;
501 521
502 // Estimate the frame duration from |frame| and |last_frame_timestamp_|. 522 // Estimate the frame duration from |frame| and |last_frame_timestamp_|.
503 base::TimeDelta EstimateFrameDuration(const scoped_refptr<VideoFrame>& frame); 523 base::TimeDelta EstimateFrameDuration(const scoped_refptr<VideoFrame>& frame);
504 524
505 // Force usage of VP9 for encoding, instead of VP8 which is the default. 525 // Force usage of VP9 for encoding, instead of VP8 which is the default.
506 const bool use_vp9_; 526 const bool use_vp9_;
507 527
508 // VPx internal objects: configuration and encoder. |encoder_| is a special 528 // VPx internal objects: configuration and encoder. |encoder_| is a special
509 // scoped pointer to guarantee proper destruction, particularly when 529 // scoped pointer to guarantee proper destruction, particularly when
510 // reconfiguring due to parameters change. Only used on |encoding_thread_|. 530 // reconfiguring due to parameters change. Only used on |encoding_thread_|.
511 vpx_codec_enc_cfg_t codec_config_; 531 vpx_codec_enc_cfg_t codec_config_;
512 ScopedVpxCodecCtxPtr encoder_; 532 ScopedVpxCodecCtxPtr encoder_;
513 533
534 vpx_codec_enc_cfg_t alpha_codec_config_;
535 ScopedVpxCodecCtxPtr alpha_encoder_;
536
537 std::vector<uint8_t> alpha_dummy_planes_;
538 size_t v_plane_offset_;
539 size_t u_plane_stride_;
540 size_t v_plane_stride_;
541 bool last_frame_had_alpha_ = false;
542
514 // The |VideoFrame::timestamp()| of the last encoded frame. This is used to 543 // The |VideoFrame::timestamp()| of the last encoded frame. This is used to
515 // predict the duration of the next frame. Only used on |encoding_thread_|. 544 // predict the duration of the next frame. Only used on |encoding_thread_|.
516 base::TimeDelta last_frame_timestamp_; 545 base::TimeDelta last_frame_timestamp_;
517 546
518 DISALLOW_COPY_AND_ASSIGN(VpxEncoder); 547 DISALLOW_COPY_AND_ASSIGN(VpxEncoder);
519 }; 548 };
520 549
521 #if BUILDFLAG(RTC_USE_H264) 550 #if BUILDFLAG(RTC_USE_H264)
522 551
523 struct ISVCEncoderDeleter { 552 struct ISVCEncoderDeleter {
(...skipping 15 matching lines...) Expand all
539 568
540 H264Encoder( 569 H264Encoder(
541 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback, 570 const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
542 int32_t bits_per_second); 571 int32_t bits_per_second);
543 572
544 private: 573 private:
545 // VideoTrackRecorder::Encoder implementation. 574 // VideoTrackRecorder::Encoder implementation.
546 ~H264Encoder() override; 575 ~H264Encoder() override;
547 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame, 576 void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
548 base::TimeTicks capture_timestamp) override; 577 base::TimeTicks capture_timestamp) override;
549 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override; 578
579 void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size);
550 580
551 // |openh264_encoder_| is a special scoped pointer to guarantee proper 581 // |openh264_encoder_| is a special scoped pointer to guarantee proper
552 // destruction, also when reconfiguring due to parameters change. Only used on 582 // destruction, also when reconfiguring due to parameters change. Only used on
553 // |encoding_thread_|. 583 // |encoding_thread_|.
554 gfx::Size configured_size_; 584 gfx::Size configured_size_;
555 ScopedISVCEncoderPtr openh264_encoder_; 585 ScopedISVCEncoderPtr openh264_encoder_;
556 586
557 // The |VideoFrame::timestamp()| of the first received frame. Only used on 587 // The |VideoFrame::timestamp()| of the first received frame. Only used on
558 // |encoding_thread_|. 588 // |encoding_thread_|.
559 base::TimeTicks first_frame_timestamp_; 589 base::TimeTicks first_frame_timestamp_;
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after
621 base::SharedMemory* output_buffer = 651 base::SharedMemory* output_buffer =
622 output_buffers_[bitstream_buffer_id].get(); 652 output_buffers_[bitstream_buffer_id].get();
623 653
624 std::unique_ptr<std::string> data(new std::string); 654 std::unique_ptr<std::string> data(new std::string);
625 data->append(reinterpret_cast<char*>(output_buffer->memory()), payload_size); 655 data->append(reinterpret_cast<char*>(output_buffer->memory()), payload_size);
626 656
627 const auto front_frame = frames_in_encode_.front(); 657 const auto front_frame = frames_in_encode_.front();
628 frames_in_encode_.pop(); 658 frames_in_encode_.pop();
629 origin_task_runner_->PostTask( 659 origin_task_runner_->PostTask(
630 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_, 660 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_,
631 front_frame.first, base::Passed(&data), 661 front_frame.first, base::Passed(&data), nullptr,
632 front_frame.second, keyframe)); 662 front_frame.second, keyframe));
633 UseOutputBitstreamBufferId(bitstream_buffer_id); 663 UseOutputBitstreamBufferId(bitstream_buffer_id);
634 } 664 }
635 665
636 void VEAEncoder::NotifyError(media::VideoEncodeAccelerator::Error error) { 666 void VEAEncoder::NotifyError(media::VideoEncodeAccelerator::Error error) {
637 DVLOG(3) << __func__; 667 DVLOG(3) << __func__;
638 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 668 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
639 669
640 // TODO(emircan): Notify the owner via a callback. 670 // TODO(emircan): Notify the owner via a callback.
641 error_notified_ = true; 671 error_notified_ = true;
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
790 base::Passed(&encoder_))); 820 base::Passed(&encoder_)));
791 } 821 }
792 822
793 void VpxEncoder::EncodeOnEncodingTaskRunner( 823 void VpxEncoder::EncodeOnEncodingTaskRunner(
794 scoped_refptr<VideoFrame> frame, 824 scoped_refptr<VideoFrame> frame,
795 base::TimeTicks capture_timestamp) { 825 base::TimeTicks capture_timestamp) {
796 TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingTaskRunner"); 826 TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingTaskRunner");
797 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 827 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
798 828
799 const gfx::Size frame_size = frame->visible_rect().size(); 829 const gfx::Size frame_size = frame->visible_rect().size();
800 if (!IsInitialized() || 830 const base::TimeDelta duration = EstimateFrameDuration(frame);
831 const media::WebmMuxer::VideoParameters video_params(frame);
832
833 if (!IsInitialized(codec_config_) ||
801 gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) { 834 gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) {
802 ConfigureEncoderOnEncodingTaskRunner(frame_size); 835 ConfigureEncoderOnEncodingTaskRunner(frame_size, &codec_config_, &encoder_);
803 } 836 }
804 837
838 const bool frame_has_alpha = frame->format() == media::PIXEL_FORMAT_YV12A;
839 if (frame_has_alpha && (!IsInitialized(alpha_codec_config_) ||
840 gfx::Size(alpha_codec_config_.g_w,
841 alpha_codec_config_.g_h) != frame_size)) {
842 ConfigureEncoderOnEncodingTaskRunner(frame_size, &alpha_codec_config_,
843 &alpha_encoder_);
844 u_plane_stride_ = media::VideoFrame::RowBytes(
845 VideoFrame::kUPlane, frame->format(), frame_size.width());
846 v_plane_stride_ = media::VideoFrame::RowBytes(
847 VideoFrame::kVPlane, frame->format(), frame_size.width());
848 v_plane_offset_ = media::VideoFrame::PlaneSize(
849 frame->format(), VideoFrame::kUPlane, frame_size)
850 .GetArea();
851 alpha_dummy_planes_.resize(
852 v_plane_offset_ + media::VideoFrame::PlaneSize(
853 frame->format(), VideoFrame::kVPlane, frame_size)
854 .GetArea());
855 // It is more expensive to encode 0x00, so use 0x80 instead.
856 std::fill(alpha_dummy_planes_.begin(), alpha_dummy_planes_.end(), 0x80);
857 }
858 // If we introduced a new alpha frame, force keyframe.
859 const bool force_keyframe = frame_has_alpha && !last_frame_had_alpha_;
860 last_frame_had_alpha_ = frame_has_alpha;
861
862 std::unique_ptr<std::string> data(new std::string);
863 bool keyframe = false;
864 DoEncode(encoder_.get(), frame_size, frame->data(VideoFrame::kYPlane),
865 frame->visible_data(VideoFrame::kYPlane),
866 frame->stride(VideoFrame::kYPlane),
867 frame->visible_data(VideoFrame::kUPlane),
868 frame->stride(VideoFrame::kUPlane),
869 frame->visible_data(VideoFrame::kVPlane),
870 frame->stride(VideoFrame::kVPlane), duration, force_keyframe,
871 data.get(), &keyframe);
872
873 std::unique_ptr<std::string> alpha_data(new std::string);
874 if (frame_has_alpha) {
875 bool alpha_keyframe = false;
876 DoEncode(alpha_encoder_.get(), frame_size, frame->data(VideoFrame::kAPlane),
877 frame->visible_data(VideoFrame::kAPlane),
878 frame->stride(VideoFrame::kAPlane), alpha_dummy_planes_.data(),
879 u_plane_stride_, alpha_dummy_planes_.data() + v_plane_offset_,
880 v_plane_stride_, duration, keyframe, alpha_data.get(),
881 &alpha_keyframe);
882 DCHECK_EQ(keyframe, alpha_keyframe);
883 }
884 frame = nullptr;
885
886 origin_task_runner_->PostTask(
887 FROM_HERE,
888 base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_,
889 video_params, base::Passed(&data), base::Passed(&alpha_data),
890 capture_timestamp, keyframe));
891 }
892
893 void VpxEncoder::DoEncode(vpx_codec_ctx_t* const encoder,
894 const gfx::Size& frame_size,
895 uint8_t* const data,
896 uint8_t* const y_plane,
897 int y_stride,
898 uint8_t* const u_plane,
899 int u_stride,
900 uint8_t* const v_plane,
901 int v_stride,
902 const base::TimeDelta& duration,
903 bool force_keyframe,
904 std::string* const output_data,
905 bool* const keyframe) {
906 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
907
805 vpx_image_t vpx_image; 908 vpx_image_t vpx_image;
806 vpx_image_t* const result = vpx_img_wrap(&vpx_image, 909 vpx_image_t* const result =
807 VPX_IMG_FMT_I420, 910 vpx_img_wrap(&vpx_image, VPX_IMG_FMT_I420, frame_size.width(),
808 frame_size.width(), 911 frame_size.height(), 1 /* align */, data);
809 frame_size.height(),
810 1 /* align */,
811 frame->data(VideoFrame::kYPlane));
812 DCHECK_EQ(result, &vpx_image); 912 DCHECK_EQ(result, &vpx_image);
813 vpx_image.planes[VPX_PLANE_Y] = frame->visible_data(VideoFrame::kYPlane); 913 vpx_image.planes[VPX_PLANE_Y] = y_plane;
814 vpx_image.planes[VPX_PLANE_U] = frame->visible_data(VideoFrame::kUPlane); 914 vpx_image.planes[VPX_PLANE_U] = u_plane;
815 vpx_image.planes[VPX_PLANE_V] = frame->visible_data(VideoFrame::kVPlane); 915 vpx_image.planes[VPX_PLANE_V] = v_plane;
816 vpx_image.stride[VPX_PLANE_Y] = frame->stride(VideoFrame::kYPlane); 916 vpx_image.stride[VPX_PLANE_Y] = y_stride;
817 vpx_image.stride[VPX_PLANE_U] = frame->stride(VideoFrame::kUPlane); 917 vpx_image.stride[VPX_PLANE_U] = u_stride;
818 vpx_image.stride[VPX_PLANE_V] = frame->stride(VideoFrame::kVPlane); 918 vpx_image.stride[VPX_PLANE_V] = v_stride;
819 919
820 const base::TimeDelta duration = EstimateFrameDuration(frame); 920 const vpx_codec_flags_t flags = force_keyframe ? VPX_EFLAG_FORCE_KF : 0;
821 // Encode the frame. The presentation time stamp argument here is fixed to 921 // Encode the frame. The presentation time stamp argument here is fixed to
822 // zero to force the encoder to base its single-frame bandwidth calculations 922 // zero to force the encoder to base its single-frame bandwidth calculations
823 // entirely on |predicted_frame_duration|. 923 // entirely on |predicted_frame_duration|.
824 const vpx_codec_err_t ret = vpx_codec_encode(encoder_.get(), 924 const vpx_codec_err_t ret =
825 &vpx_image, 925 vpx_codec_encode(encoder, &vpx_image, 0 /* pts */,
826 0 /* pts */, 926 duration.InMicroseconds(), flags, VPX_DL_REALTIME);
827 duration.InMicroseconds(), 927 DCHECK_EQ(ret, VPX_CODEC_OK)
828 0 /* flags */, 928 << vpx_codec_err_to_string(ret) << ", #" << vpx_codec_error(encoder)
829 VPX_DL_REALTIME); 929 << " -" << vpx_codec_error_detail(encoder);
830 DCHECK_EQ(ret, VPX_CODEC_OK) << vpx_codec_err_to_string(ret) << ", #"
831 << vpx_codec_error(encoder_.get()) << " -"
832 << vpx_codec_error_detail(encoder_.get());
833 930
834 const media::WebmMuxer::VideoParameters video_params(frame); 931 *keyframe = false;
835 frame = nullptr;
836
837 std::unique_ptr<std::string> data(new std::string);
838 bool keyframe = false;
839 vpx_codec_iter_t iter = NULL; 932 vpx_codec_iter_t iter = NULL;
840 const vpx_codec_cx_pkt_t* pkt = NULL; 933 const vpx_codec_cx_pkt_t* pkt = NULL;
841 while ((pkt = vpx_codec_get_cx_data(encoder_.get(), &iter)) != NULL) { 934 while ((pkt = vpx_codec_get_cx_data(encoder, &iter)) != NULL) {
842 if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) 935 if (pkt->kind != VPX_CODEC_CX_FRAME_PKT)
843 continue; 936 continue;
844 data->assign(static_cast<char*>(pkt->data.frame.buf), pkt->data.frame.sz); 937 output_data->assign(static_cast<char*>(pkt->data.frame.buf),
845 keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0; 938 pkt->data.frame.sz);
939 *keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
846 break; 940 break;
847 } 941 }
848 origin_task_runner_->PostTask(FROM_HERE,
849 base::Bind(OnFrameEncodeCompleted,
850 on_encoded_video_callback_,
851 video_params,
852 base::Passed(&data),
853 capture_timestamp,
854 keyframe));
855 } 942 }
856 943
857 void VpxEncoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) { 944 void VpxEncoder::ConfigureEncoderOnEncodingTaskRunner(
945 const gfx::Size& size,
946 vpx_codec_enc_cfg_t* codec_config,
947 ScopedVpxCodecCtxPtr* encoder) {
858 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 948 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
859 if (IsInitialized()) { 949 if (IsInitialized(*codec_config)) {
860 // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less- 950 // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less-
861 // than-or-equal than the old size, in terms of area, the existing encoder 951 // than-or-equal than the old size, in terms of area, the existing encoder
862 // instance could be reused after changing |codec_config_.{g_w,g_h}|. 952 // instance could be reused after changing |codec_config->{g_w,g_h}|.
863 DVLOG(1) << "Destroying/Re-Creating encoder for new frame size: " 953 DVLOG(1) << "Destroying/Re-Creating encoder for new frame size: "
864 << gfx::Size(codec_config_.g_w, codec_config_.g_h).ToString() 954 << gfx::Size(codec_config->g_w, codec_config->g_h).ToString()
865 << " --> " << size.ToString() << (use_vp9_ ? " vp9" : " vp8"); 955 << " --> " << size.ToString() << (use_vp9_ ? " vp9" : " vp8");
866 encoder_.reset(); 956 encoder->reset();
867 } 957 }
868 958
869 const vpx_codec_iface_t* codec_interface = 959 const vpx_codec_iface_t* codec_interface =
870 use_vp9_ ? vpx_codec_vp9_cx() : vpx_codec_vp8_cx(); 960 use_vp9_ ? vpx_codec_vp9_cx() : vpx_codec_vp8_cx();
871 vpx_codec_err_t result = vpx_codec_enc_config_default( 961 vpx_codec_err_t result = vpx_codec_enc_config_default(
872 codec_interface, &codec_config_, 0 /* reserved */); 962 codec_interface, codec_config, 0 /* reserved */);
873 DCHECK_EQ(VPX_CODEC_OK, result); 963 DCHECK_EQ(VPX_CODEC_OK, result);
874 964
875 DCHECK_EQ(320u, codec_config_.g_w); 965 DCHECK_EQ(320u, codec_config->g_w);
876 DCHECK_EQ(240u, codec_config_.g_h); 966 DCHECK_EQ(240u, codec_config->g_h);
877 DCHECK_EQ(256u, codec_config_.rc_target_bitrate); 967 DCHECK_EQ(256u, codec_config->rc_target_bitrate);
878 // Use the selected bitrate or adjust default bit rate to account for the 968 // Use the selected bitrate or adjust default bit rate to account for the
879 // actual size. Note: |rc_target_bitrate| units are kbit per second. 969 // actual size. Note: |rc_target_bitrate| units are kbit per second.
880 if (bits_per_second_ > 0) { 970 if (bits_per_second_ > 0) {
881 codec_config_.rc_target_bitrate = bits_per_second_ / 1000; 971 codec_config->rc_target_bitrate = bits_per_second_ / 1000;
882 } else { 972 } else {
883 codec_config_.rc_target_bitrate = size.GetArea() * 973 codec_config->rc_target_bitrate = size.GetArea() *
884 codec_config_.rc_target_bitrate / 974 codec_config->rc_target_bitrate /
885 codec_config_.g_w / codec_config_.g_h; 975 codec_config->g_w / codec_config->g_h;
886 } 976 }
887 // Both VP8/VP9 configuration should be Variable BitRate by default. 977 // Both VP8/VP9 configuration should be Variable BitRate by default.
888 DCHECK_EQ(VPX_VBR, codec_config_.rc_end_usage); 978 DCHECK_EQ(VPX_VBR, codec_config->rc_end_usage);
889 if (use_vp9_) { 979 if (use_vp9_) {
890 // Number of frames to consume before producing output. 980 // Number of frames to consume before producing output.
891 codec_config_.g_lag_in_frames = 0; 981 codec_config->g_lag_in_frames = 0;
892 982
893 // DCHECK that the profile selected by default is I420 (magic number 0). 983 // DCHECK that the profile selected by default is I420 (magic number 0).
894 DCHECK_EQ(0u, codec_config_.g_profile); 984 DCHECK_EQ(0u, codec_config->g_profile);
895 } else { 985 } else {
896 // VP8 always produces frames instantaneously. 986 // VP8 always produces frames instantaneously.
897 DCHECK_EQ(0u, codec_config_.g_lag_in_frames); 987 DCHECK_EQ(0u, codec_config->g_lag_in_frames);
898 } 988 }
899 989
900 DCHECK(size.width()); 990 DCHECK(size.width());
901 DCHECK(size.height()); 991 DCHECK(size.height());
902 codec_config_.g_w = size.width(); 992 codec_config->g_w = size.width();
903 codec_config_.g_h = size.height(); 993 codec_config->g_h = size.height();
904 codec_config_.g_pass = VPX_RC_ONE_PASS; 994 codec_config->g_pass = VPX_RC_ONE_PASS;
905 995
906 // Timebase is the smallest interval used by the stream, can be set to the 996 // Timebase is the smallest interval used by the stream, can be set to the
907 // frame rate or to e.g. microseconds. 997 // frame rate or to e.g. microseconds.
908 codec_config_.g_timebase.num = 1; 998 codec_config->g_timebase.num = 1;
909 codec_config_.g_timebase.den = base::Time::kMicrosecondsPerSecond; 999 codec_config->g_timebase.den = base::Time::kMicrosecondsPerSecond;
910 1000
911 // Let the encoder decide where to place the Keyframes, between min and max. 1001 // Let the encoder decide where to place the Keyframes, between min and max.
912 // In VPX_KF_AUTO mode libvpx will sometimes emit keyframes regardless of min/ 1002 // In VPX_KF_AUTO mode libvpx will sometimes emit keyframes regardless of min/
913 // max distance out of necessity. 1003 // max distance out of necessity.
914 // Note that due to http://crbug.com/440223, it might be necessary to force a 1004 // Note that due to http://crbug.com/440223, it might be necessary to force a
915 // key frame after 10,000frames since decoding fails after 30,000 non-key 1005 // key frame after 10,000frames since decoding fails after 30,000 non-key
916 // frames. 1006 // frames.
917 // Forcing a keyframe in regular intervals also allows seeking in the 1007 // Forcing a keyframe in regular intervals also allows seeking in the
918 // resulting recording with decent performance. 1008 // resulting recording with decent performance.
919 codec_config_.kf_mode = VPX_KF_AUTO; 1009 codec_config->kf_mode = VPX_KF_AUTO;
920 codec_config_.kf_min_dist = 0; 1010 codec_config->kf_min_dist = 0;
921 codec_config_.kf_max_dist = 100; 1011 codec_config->kf_max_dist = 100;
922 1012
923 codec_config_.g_threads = GetNumberOfThreadsForEncoding(); 1013 codec_config->g_threads = GetNumberOfThreadsForEncoding();
924 1014
925 // Number of frames to consume before producing output. 1015 // Number of frames to consume before producing output.
926 codec_config_.g_lag_in_frames = 0; 1016 codec_config->g_lag_in_frames = 0;
927 1017
928 DCHECK(!encoder_); 1018 encoder->reset(new vpx_codec_ctx_t);
929 encoder_.reset(new vpx_codec_ctx_t);
930 const vpx_codec_err_t ret = vpx_codec_enc_init( 1019 const vpx_codec_err_t ret = vpx_codec_enc_init(
931 encoder_.get(), codec_interface, &codec_config_, 0 /* flags */); 1020 encoder->get(), codec_interface, codec_config, 0 /* flags */);
932 DCHECK_EQ(VPX_CODEC_OK, ret); 1021 DCHECK_EQ(VPX_CODEC_OK, ret);
933 1022
934 if (use_vp9_) { 1023 if (use_vp9_) {
935 // Values of VP8E_SET_CPUUSED greater than 0 will increase encoder speed at 1024 // Values of VP8E_SET_CPUUSED greater than 0 will increase encoder speed at
936 // the expense of quality up to a maximum value of 8 for VP9, by tuning the 1025 // the expense of quality up to a maximum value of 8 for VP9, by tuning the
937 // target time spent encoding the frame. Go from 8 to 5 (values for real 1026 // target time spent encoding the frame. Go from 8 to 5 (values for real
938 // time encoding) depending on the amount of cores available in the system. 1027 // time encoding) depending on the amount of cores available in the system.
939 const int kCpuUsed = 1028 const int kCpuUsed =
940 std::max(5, 8 - base::SysInfo::NumberOfProcessors() / 2); 1029 std::max(5, 8 - base::SysInfo::NumberOfProcessors() / 2);
941 result = vpx_codec_control(encoder_.get(), VP8E_SET_CPUUSED, kCpuUsed); 1030 result = vpx_codec_control(encoder->get(), VP8E_SET_CPUUSED, kCpuUsed);
942 DLOG_IF(WARNING, VPX_CODEC_OK != result) << "VP8E_SET_CPUUSED failed"; 1031 DLOG_IF(WARNING, VPX_CODEC_OK != result) << "VP8E_SET_CPUUSED failed";
943 } 1032 }
944 } 1033 }
945 1034
946 bool VpxEncoder::IsInitialized() const { 1035 bool VpxEncoder::IsInitialized(const vpx_codec_enc_cfg_t& codec_config) const {
947 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 1036 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
948 return codec_config_.g_timebase.den != 0; 1037 return codec_config.g_timebase.den != 0;
949 } 1038 }
950 1039
951 base::TimeDelta VpxEncoder::EstimateFrameDuration( 1040 base::TimeDelta VpxEncoder::EstimateFrameDuration(
952 const scoped_refptr<VideoFrame>& frame) { 1041 const scoped_refptr<VideoFrame>& frame) {
953 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 1042 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
954 1043
955 using base::TimeDelta; 1044 using base::TimeDelta;
956 TimeDelta predicted_frame_duration; 1045 TimeDelta predicted_frame_duration;
957 if (!frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION, 1046 if (!frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
958 &predicted_frame_duration) || 1047 &predicted_frame_duration) ||
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after
1046 1135
1047 layer_len += layerInfo.pNalLengthInByte[nal]; 1136 layer_len += layerInfo.pNalLengthInByte[nal];
1048 } 1137 }
1049 // Copy the entire layer's data (including NAL start codes). 1138 // Copy the entire layer's data (including NAL start codes).
1050 data->append(reinterpret_cast<char*>(layerInfo.pBsBuf), layer_len); 1139 data->append(reinterpret_cast<char*>(layerInfo.pBsBuf), layer_len);
1051 } 1140 }
1052 1141
1053 const bool is_key_frame = info.eFrameType == videoFrameTypeIDR; 1142 const bool is_key_frame = info.eFrameType == videoFrameTypeIDR;
1054 origin_task_runner_->PostTask( 1143 origin_task_runner_->PostTask(
1055 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_, 1144 FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_,
1056 video_params, base::Passed(&data), 1145 video_params, base::Passed(&data), nullptr,
1057 capture_timestamp, is_key_frame)); 1146 capture_timestamp, is_key_frame));
1058 } 1147 }
1059 1148
1060 void H264Encoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) { 1149 void H264Encoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) {
1061 DCHECK(encoding_task_runner_->BelongsToCurrentThread()); 1150 DCHECK(encoding_task_runner_->BelongsToCurrentThread());
1062 ISVCEncoder* temp_encoder = nullptr; 1151 ISVCEncoder* temp_encoder = nullptr;
1063 if (WelsCreateSVCEncoder(&temp_encoder) != 0) { 1152 if (WelsCreateSVCEncoder(&temp_encoder) != 0) {
1064 NOTREACHED() << "Failed to create OpenH264 encoder"; 1153 NOTREACHED() << "Failed to create OpenH264 encoder";
1065 return; 1154 return;
1066 } 1155 }
(...skipping 162 matching lines...) Expand 10 before | Expand all | Expand 10 after
1229 if (paused_before_init_) 1318 if (paused_before_init_)
1230 encoder_->SetPaused(paused_before_init_); 1319 encoder_->SetPaused(paused_before_init_);
1231 1320
1232 // StartFrameEncode() will be called on Render IO thread. 1321 // StartFrameEncode() will be called on Render IO thread.
1233 MediaStreamVideoSink::ConnectToTrack( 1322 MediaStreamVideoSink::ConnectToTrack(
1234 track_, 1323 track_,
1235 base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_), 1324 base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_),
1236 false); 1325 false);
1237 } 1326 }
1238 1327
1328 bool VideoTrackRecorder::CanEncodeAlphaChannelForTesting() {
1329 DCHECK(encoder_);
1330 return encoder_->CanEncodeAlphaChannel();
1331 }
1332
1239 } // namespace content 1333 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698