Chromium Code Reviews

Unified Diff: content/renderer/media/video_track_recorder.cc

Issue 2612403002: Release video frames earlier in MediaRecorder (Closed)
Patch Set: mcasas@ nits. Created 3 years, 11 months ago
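This CL changes the encoded-video reply path so that it no longer retains a scoped_refptr<VideoFrame>: OnEncodedVideoCB now receives a lightweight media::WebmMuxer::VideoParameters snapshot, each encoder takes its frame by value, and the frame reference is dropped (frame = nullptr) as soon as the pixel data has been consumed, so the capture buffer can be recycled before the encoded data is posted back to the origin thread. The following is a minimal standalone sketch of that pattern; Frame, VideoParameters, OnEncodedVideoCB and EncodeAndRelease below are hypothetical stand-ins for illustration, not the real Chromium types.

// Minimal sketch of the "release frames earlier" pattern in this CL.
// Frame, VideoParameters, OnEncodedVideoCB and EncodeAndRelease are
// hypothetical stand-ins, not the real media::VideoFrame or
// media::WebmMuxer::VideoParameters.
#include <functional>
#include <iostream>
#include <memory>
#include <string>
#include <utility>

struct Frame {            // Stand-in for a captured video frame.
  int width = 0;
  int height = 0;
  std::string pixels;     // Pretend this is the large pixel buffer.
};

struct VideoParameters {  // Stand-in for the muxer's parameter snapshot.
  int width;
  int height;
  explicit VideoParameters(const Frame& f) : width(f.width), height(f.height) {}
};

using OnEncodedVideoCB =
    std::function<void(const VideoParameters&, std::unique_ptr<std::string>)>;

// Takes the frame by value (mirroring the scoped_refptr-by-value change),
// snapshots the parameters the muxer needs, then drops the frame reference
// before invoking the reply, so the buffer is released as early as possible.
void EncodeAndRelease(std::shared_ptr<Frame> frame, const OnEncodedVideoCB& cb) {
  auto data = std::make_unique<std::string>("encoded:" + frame->pixels);
  const VideoParameters params(*frame);  // Only dimensions survive past here.
  frame = nullptr;                       // Last reference released now...
  cb(params, std::move(data));           // ...not after the callback has run.
}

int main() {
  auto frame = std::make_shared<Frame>(Frame{640, 480, "...pixels..."});
  EncodeAndRelease(std::move(frame),
                   [](const VideoParameters& p, std::unique_ptr<std::string> d) {
                     std::cout << p.width << "x" << p.height << ", "
                               << d->size() << "B" << std::endl;
                   });
  return 0;
}

The key point of the sketch is the order of operations inside EncodeAndRelease: snapshot the few parameters the muxer needs, release the frame, and only then hand the encoded payload onward.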
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/video_track_recorder.h"

#include <utility>

#include "base/bind.h"
#include "base/logging.h"
(...skipping 148 matching lines...)

  void SetPaused(bool paused);

 protected:
  friend class base::RefCountedThreadSafe<Encoder>;
  virtual ~Encoder() {
    main_task_runner_->DeleteSoon(FROM_HERE, video_renderer_.release());
  }

  virtual void EncodeOnEncodingTaskRunner(
-      const scoped_refptr<VideoFrame>& frame,
+      scoped_refptr<VideoFrame> frame,
      base::TimeTicks capture_timestamp) = 0;
  virtual void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) = 0;

  // Used to shutdown properly on the same thread we were created.
  const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;

  // Task runner where frames to encode and reply callbacks must happen.
  scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_;

  // Task runner where encoding interactions happen.
(...skipping 151 matching lines...)
      return;
    vpx_codec_err_t ret = vpx_codec_destroy(codec);
    CHECK_EQ(ret, VPX_CODEC_OK);
    delete codec;
  }
};
typedef std::unique_ptr<vpx_codec_ctx_t, VpxCodecDeleter> ScopedVpxCodecCtxPtr;

static void OnFrameEncodeCompleted(
    const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_cb,
-    const scoped_refptr<VideoFrame>& frame,
+    const media::WebmMuxer::VideoParameters& params,
    std::unique_ptr<std::string> data,
    base::TimeTicks capture_timestamp,
    bool keyframe) {
  DVLOG(1) << (keyframe ? "" : "non ") << "keyframe "<< data->length() << "B, "
           << capture_timestamp << " ms";
-  on_encoded_video_cb.Run(frame, std::move(data), capture_timestamp, keyframe);
+  on_encoded_video_cb.Run(params, std::move(data), capture_timestamp, keyframe);
}

static int GetNumberOfThreadsForEncoding() {
  // Do not saturate CPU utilization just for encoding. On a lower-end system
  // with only 1 or 2 cores, use only one thread for encoding. On systems with
  // more cores, allow half of the cores to be used for encoding.
  return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2);
}

// Class encapsulating VideoEncodeAccelerator interactions.
(...skipping 13 matching lines...)
                               const gfx::Size& input_coded_size,
                               size_t output_buffer_size) override;
  void BitstreamBufferReady(int32_t bitstream_buffer_id,
                            size_t payload_size,
                            bool key_frame,
                            base::TimeDelta timestamp) override;
  void NotifyError(media::VideoEncodeAccelerator::Error error) override;

 private:
  using VideoFrameAndTimestamp =
-      std::pair<scoped_refptr<VideoFrame>, base::TimeTicks>;
+      std::pair<scoped_refptr<media::VideoFrame>, base::TimeTicks>;
+  using VideoParamsAndTimestamp =
+      std::pair<media::WebmMuxer::VideoParameters, base::TimeTicks>;

  void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id);
  void FrameFinished(std::unique_ptr<base::SharedMemory> shm);

  // VideoTrackRecorder::Encoder implementation.
  ~VEAEncoder() override;
-  void EncodeOnEncodingTaskRunner(const scoped_refptr<VideoFrame>& frame,
+  void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
                                  base::TimeTicks capture_timestamp) override;
  void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override;

  media::GpuVideoAcceleratorFactories* const gpu_factories_;

  const media::VideoCodecProfile codec_;

  // The underlying VEA to perform encoding on.
  std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_;

  // Shared memory buffers for output with the VEA.
  std::vector<std::unique_ptr<base::SharedMemory>> output_buffers_;

  // Shared memory buffers for output with the VEA as FIFO.
  std::queue<std::unique_ptr<base::SharedMemory>> input_buffers_;

  // Tracks error status.
  bool error_notified_;

  // Tracks the last frame that we delay the encode.
  std::unique_ptr<VideoFrameAndTimestamp> last_frame_;

  // Size used to initialize encoder.
  gfx::Size input_size_;

  // Coded size that encoder requests as input.
  gfx::Size vea_requested_input_size_;

  // Frames and corresponding timestamps in encode as FIFO.
-  std::queue<VideoFrameAndTimestamp> frames_in_encode_;
+  std::queue<VideoParamsAndTimestamp> frames_in_encode_;
};

// Class encapsulating all libvpx interactions for VP8/VP9 encoding.
class VpxEncoder final : public VideoTrackRecorder::Encoder {
 public:
  static void ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread,
                              ScopedVpxCodecCtxPtr encoder);

  VpxEncoder(
      bool use_vp9,
      const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
      int32_t bits_per_second);

 private:
  // VideoTrackRecorder::Encoder implementation.
  ~VpxEncoder() override;
-  void EncodeOnEncodingTaskRunner(const scoped_refptr<VideoFrame>& frame,
+  void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
                                  base::TimeTicks capture_timestamp) override;
  void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override;

  // Returns true if |codec_config_| has been filled in at least once.
  bool IsInitialized() const;

  // Estimate the frame duration from |frame| and |last_frame_timestamp_|.
  base::TimeDelta EstimateFrameDuration(const scoped_refptr<VideoFrame>& frame);

  // Force usage of VP9 for encoding, instead of VP8 which is the default.
(...skipping 31 matching lines...)
  static void ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread,
                              ScopedISVCEncoderPtr encoder);

  H264Encoder(
      const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
      int32_t bits_per_second);

 private:
  // VideoTrackRecorder::Encoder implementation.
  ~H264Encoder() override;
-  void EncodeOnEncodingTaskRunner(const scoped_refptr<VideoFrame>& frame,
+  void EncodeOnEncodingTaskRunner(scoped_refptr<VideoFrame> frame,
                                  base::TimeTicks capture_timestamp) override;
  void ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) override;

  // |openh264_encoder_| is a special scoped pointer to guarantee proper
  // destruction, also when reconfiguring due to parameters change. Only used on
  // |encoding_thread_|.
  gfx::Size configured_size_;
  ScopedISVCEncoderPtr openh264_encoder_;

  // The |VideoFrame::timestamp()| of the first received frame. Only used on
(...skipping 91 matching lines...)
      output_buffers_[bitstream_buffer_id]->mapped_size()));
}

void VEAEncoder::FrameFinished(std::unique_ptr<base::SharedMemory> shm) {
  DVLOG(3) << __func__;
  DCHECK(encoding_task_runner_->BelongsToCurrentThread());
  input_buffers_.push(std::move(shm));
}

void VEAEncoder::EncodeOnEncodingTaskRunner(
-    const scoped_refptr<VideoFrame>& frame,
+    scoped_refptr<VideoFrame> frame,
    base::TimeTicks capture_timestamp) {
  DVLOG(3) << __func__;
  DCHECK(encoding_task_runner_->BelongsToCurrentThread());

  if (input_size_ != frame->visible_rect().size() && video_encoder_) {
    video_encoder_->Destroy();
    video_encoder_.reset();
  }

  if (!video_encoder_)
(...skipping 63 matching lines...)
                     frame->visible_data(media::VideoFrame::kVPlane),
                     frame->stride(media::VideoFrame::kVPlane),
                     video_frame->visible_data(media::VideoFrame::kYPlane),
                     video_frame->stride(media::VideoFrame::kYPlane),
                     video_frame->visible_data(media::VideoFrame::kUPlane),
                     video_frame->stride(media::VideoFrame::kUPlane),
                     video_frame->visible_data(media::VideoFrame::kVPlane),
                     video_frame->stride(media::VideoFrame::kVPlane),
                     input_size_.width(), input_size_.height());
  }
-  frames_in_encode_.push(std::make_pair(video_frame, capture_timestamp));
+  frames_in_encode_.push(std::make_pair(
+      media::WebmMuxer::VideoParameters(frame), capture_timestamp));

  encoding_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&media::VideoEncodeAccelerator::Encode,
                 base::Unretained(video_encoder_.get()), video_frame, false));
}

void VEAEncoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) {
  DVLOG(3) << __func__;
  DCHECK(encoding_task_runner_->BelongsToCurrentThread());
(...skipping 27 matching lines...)
}

VpxEncoder::~VpxEncoder() {
  main_task_runner_->PostTask(FROM_HERE,
                              base::Bind(&VpxEncoder::ShutdownEncoder,
                                         base::Passed(&encoding_thread_),
                                         base::Passed(&encoder_)));
}

void VpxEncoder::EncodeOnEncodingTaskRunner(
-    const scoped_refptr<VideoFrame>& frame,
+    scoped_refptr<VideoFrame> frame,
    base::TimeTicks capture_timestamp) {
  TRACE_EVENT0("video", "VpxEncoder::EncodeOnEncodingTaskRunner");
  DCHECK(encoding_task_runner_->BelongsToCurrentThread());

  const gfx::Size frame_size = frame->visible_rect().size();
  if (!IsInitialized() ||
      gfx::Size(codec_config_.g_w, codec_config_.g_h) != frame_size) {
    ConfigureEncoderOnEncodingTaskRunner(frame_size);
  }

(...skipping 19 matching lines...)
  const vpx_codec_err_t ret = vpx_codec_encode(encoder_.get(),
                                               &vpx_image,
                                               0 /* pts */,
                                               duration.InMicroseconds(),
                                               0 /* flags */,
                                               VPX_DL_REALTIME);
  DCHECK_EQ(ret, VPX_CODEC_OK) << vpx_codec_err_to_string(ret) << ", #"
                               << vpx_codec_error(encoder_.get()) << " -"
                               << vpx_codec_error_detail(encoder_.get());

+  const media::WebmMuxer::VideoParameters video_params(frame);
+  frame = nullptr;
+
  std::unique_ptr<std::string> data(new std::string);
  bool keyframe = false;
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t* pkt = NULL;
  while ((pkt = vpx_codec_get_cx_data(encoder_.get(), &iter)) != NULL) {
    if (pkt->kind != VPX_CODEC_CX_FRAME_PKT)
      continue;
    data->assign(static_cast<char*>(pkt->data.frame.buf), pkt->data.frame.sz);
    keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
    break;
  }
  origin_task_runner_->PostTask(FROM_HERE,
                                base::Bind(OnFrameEncodeCompleted,
                                           on_encoded_video_callback_,
-                                          frame,
+                                          video_params,
                                           base::Passed(&data),
                                           capture_timestamp,
                                           keyframe));
}

void VpxEncoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) {
  DCHECK(encoding_task_runner_->BelongsToCurrentThread());
  if (IsInitialized()) {
    // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less-
    // than-or-equal than the old size, in terms of area, the existing encoder
(...skipping 129 matching lines...)
}

H264Encoder::~H264Encoder() {
  main_task_runner_->PostTask(FROM_HERE,
                              base::Bind(&H264Encoder::ShutdownEncoder,
                                         base::Passed(&encoding_thread_),
                                         base::Passed(&openh264_encoder_)));
}

void H264Encoder::EncodeOnEncodingTaskRunner(
-    const scoped_refptr<VideoFrame>& frame,
+    scoped_refptr<VideoFrame> frame,
    base::TimeTicks capture_timestamp) {
  TRACE_EVENT0("video", "H264Encoder::EncodeOnEncodingTaskRunner");
  DCHECK(encoding_task_runner_->BelongsToCurrentThread());

  const gfx::Size frame_size = frame->visible_rect().size();
  if (!openh264_encoder_ || configured_size_ != frame_size) {
    ConfigureEncoderOnEncodingTaskRunner(frame_size);
    first_frame_timestamp_ = capture_timestamp;
  }

  SSourcePicture picture = {};
  picture.iPicWidth = frame_size.width();
  picture.iPicHeight = frame_size.height();
  picture.iColorFormat = EVideoFormatType::videoFormatI420;
  picture.uiTimeStamp =
      (capture_timestamp - first_frame_timestamp_).InMilliseconds();
  picture.iStride[0] = frame->stride(VideoFrame::kYPlane);
  picture.iStride[1] = frame->stride(VideoFrame::kUPlane);
  picture.iStride[2] = frame->stride(VideoFrame::kVPlane);
  picture.pData[0] = frame->visible_data(VideoFrame::kYPlane);
  picture.pData[1] = frame->visible_data(VideoFrame::kUPlane);
  picture.pData[2] = frame->visible_data(VideoFrame::kVPlane);

  SFrameBSInfo info = {};
  if (openh264_encoder_->EncodeFrame(&picture, &info) != cmResultSuccess) {
    NOTREACHED() << "OpenH264 encoding failed";
    return;
  }
+  const media::WebmMuxer::VideoParameters video_params(frame);
+  frame = nullptr;

  std::unique_ptr<std::string> data(new std::string);
  const uint8_t kNALStartCode[4] = {0, 0, 0, 1};
  for (int layer = 0; layer < info.iLayerNum; ++layer) {
    const SLayerBSInfo& layerInfo = info.sLayerInfo[layer];
    // Iterate NAL units making up this layer, noting fragments.
    size_t layer_len = 0;
    for (int nal = 0; nal < layerInfo.iNalCount; ++nal) {
      // The following DCHECKs make sure that the header of each NAL unit is OK.
      DCHECK_GE(layerInfo.pNalLengthInByte[nal], 4);
      DCHECK_EQ(kNALStartCode[0], layerInfo.pBsBuf[layer_len+0]);
      DCHECK_EQ(kNALStartCode[1], layerInfo.pBsBuf[layer_len+1]);
      DCHECK_EQ(kNALStartCode[2], layerInfo.pBsBuf[layer_len+2]);
      DCHECK_EQ(kNALStartCode[3], layerInfo.pBsBuf[layer_len+3]);

      layer_len += layerInfo.pNalLengthInByte[nal];
    }
    // Copy the entire layer's data (including NAL start codes).
    data->append(reinterpret_cast<char*>(layerInfo.pBsBuf), layer_len);
  }

  const bool is_key_frame = info.eFrameType == videoFrameTypeIDR;
  origin_task_runner_->PostTask(
-      FROM_HERE,
-      base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_, frame,
-                 base::Passed(&data), capture_timestamp, is_key_frame));
+      FROM_HERE, base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_,
+                            video_params, base::Passed(&data),
+                            capture_timestamp, is_key_frame));
}

void H264Encoder::ConfigureEncoderOnEncodingTaskRunner(const gfx::Size& size) {
  DCHECK(encoding_task_runner_->BelongsToCurrentThread());
  ISVCEncoder* temp_encoder = nullptr;
  if (WelsCreateSVCEncoder(&temp_encoder) != 0) {
    NOTREACHED() << "Failed to create OpenH264 encoder";
    return;
  }
  openh264_encoder_.reset(temp_encoder);
(...skipping 156 matching lines...)
  encoder_->SetPaused(paused_before_init_);

  // StartFrameEncode() will be called on Render IO thread.
  MediaStreamVideoSink::ConnectToTrack(
      track_,
      base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_),
      false);
}

}  // namespace content
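In the hardware-accelerated (VEA) path the same idea shows up as bookkeeping: frames_in_encode_ now queues WebmMuxer::VideoParameters plus the capture timestamp instead of the frames that are in flight inside the accelerator, and the queued entries are consumed in FIFO order as encoded output comes back. A simplified sketch of that bookkeeping, with hypothetical names rather than the real VEAEncoder interface:

// Simplified sketch (hypothetical names) of the in-flight bookkeeping used by
// the VEA path after this CL: while a frame is being encoded by the hardware,
// only its lightweight parameters and capture timestamp are retained, and they
// are paired with the encoded bitstream in FIFO order.
#include <cstdint>
#include <iostream>
#include <queue>
#include <string>
#include <utility>

struct VideoParameters {  // Stand-in for WebmMuxer::VideoParameters.
  int width = 0;
  int height = 0;
};

class InFlightBookkeeping {
 public:
  // Frame submitted for encoding: remember parameters + timestamp, not pixels.
  void OnEncodeSubmitted(const VideoParameters& params, int64_t timestamp_ms) {
    in_flight_.push({params, timestamp_ms});
  }

  // Encoded bitstream returned: pair it with the oldest outstanding entry.
  void OnBitstreamReady(const std::string& payload, bool keyframe) {
    if (in_flight_.empty())
      return;
    const auto entry = in_flight_.front();
    in_flight_.pop();
    std::cout << entry.first.width << "x" << entry.first.height << " @ "
              << entry.second << "ms, " << payload.size()
              << "B, key=" << keyframe << std::endl;
  }

 private:
  std::queue<std::pair<VideoParameters, int64_t>> in_flight_;
};

int main() {
  InFlightBookkeeping bookkeeping;
  bookkeeping.OnEncodeSubmitted({640, 480}, 0);
  bookkeeping.OnEncodeSubmitted({640, 480}, 33);
  bookkeeping.OnBitstreamReady("keyframe bytes", true);
  bookkeeping.OnBitstreamReady("delta bytes", false);
  return 0;
}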