| Index: content/renderer/media/video_track_recorder.cc
|
| diff --git a/content/renderer/media/video_track_recorder.cc b/content/renderer/media/video_track_recorder.cc
|
| index 2dc0b667c7251257fe3eb7c8a6b618483bf9576e..86ef0265270b1d7bdba26a4cff2e78eb36910479 100644
|
| --- a/content/renderer/media/video_track_recorder.cc
|
| +++ b/content/renderer/media/video_track_recorder.cc
|
| @@ -25,6 +25,10 @@ extern "C" {
|
| #include "third_party/libvpx/source/libvpx/vpx/vpx_encoder.h"
|
| }
|
|
|
| +#include "third_party/openh264/src/codec/api/svc/codec_api.h"
|
| +#include "third_party/openh264/src/codec/api/svc/codec_app_def.h"
|
| +#include "third_party/openh264/src/codec/api/svc/codec_def.h"
|
| +
|
| using media::VideoFrame;
|
| using media::VideoFrameMetadata;
|
|
|
| @@ -61,6 +65,20 @@ void OnFrameEncodeCompleted(
|
|
|
| } // anonymous namespace
|
|
|
| +class VideoTrackRecorder::Encoder : public base::RefCountedThreadSafe<Encoder> {
|
| + public:
|
| + virtual void StartFrameEncode(const scoped_refptr<VideoFrame>& frame,
|
| + base::TimeTicks capture_timestamp) = 0;
|
| +
|
| + virtual void set_paused(bool paused) = 0;
|
| +
|
| + protected:
|
| + friend class base::RefCountedThreadSafe<Encoder>;
|
| + virtual ~Encoder() {}
|
| +};
|
| +
|
| +namespace {
|
| +
|
| // Inner class encapsulating all libvpx interactions and the encoding+delivery
|
| // of received frames. Limitation: Only VP8 is supported for the time being.
|
| // This class must be ref-counted because the MediaStreamVideoTrack will hold a
|
| @@ -73,24 +91,23 @@ void OnFrameEncodeCompleted(
|
| // thread, but this is not enforced;
|
| // - uses an internal |encoding_thread_| for libvpx interactions, notably for
|
| // encoding (which might take some time).
|
| -class VideoTrackRecorder::VpxEncoder final
|
| - : public base::RefCountedThreadSafe<VpxEncoder> {
|
| +class VpxEncoder final : public VideoTrackRecorder::Encoder {
|
| public:
|
| static void ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread,
|
| ScopedVpxCodecCtxPtr encoder);
|
|
|
| - VpxEncoder(bool use_vp9,
|
| - const OnEncodedVideoCB& on_encoded_video_callback,
|
| - int32_t bits_per_second);
|
| + VpxEncoder(
|
| + bool use_vp9,
|
| + const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
|
| + int32_t bits_per_second);
|
|
|
| void StartFrameEncode(const scoped_refptr<VideoFrame>& frame,
|
| - base::TimeTicks capture_timestamp);
|
| + base::TimeTicks capture_timestamp) override;
|
|
|
| - void set_paused(bool paused) { paused_ = paused; }
|
| + void set_paused(bool paused) override { paused_ = paused; }
|
|
|
| private:
|
| - friend class base::RefCountedThreadSafe<VpxEncoder>;
|
| - ~VpxEncoder();
|
| + ~VpxEncoder() override;
|
|
|
| void EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame,
|
| base::TimeTicks capture_timestamp);
|
| @@ -111,7 +128,7 @@ class VideoTrackRecorder::VpxEncoder final
|
| const bool use_vp9_;
|
|
|
| // This callback should be exercised on IO thread.
|
| - const OnEncodedVideoCB on_encoded_video_callback_;
|
| + const VideoTrackRecorder::OnEncodedVideoCB on_encoded_video_callback_;
|
|
|
| // Target bitrate or video encoding. If 0, a standard bitrate is used.
|
| const int32_t bits_per_second_;
|
| @@ -131,6 +148,7 @@ class VideoTrackRecorder::VpxEncoder final
|
| // Again, it should only be accessed on |encoding_thread_|.
|
| ScopedVpxCodecCtxPtr encoder_;
|
|
|
| +
|
| // The |VideoFrame::timestamp()| of the last encoded frame. This is used to
|
| // predict the duration of the next frame.
|
| base::TimeDelta last_frame_timestamp_;
|
| @@ -138,18 +156,254 @@ class VideoTrackRecorder::VpxEncoder final
|
| DISALLOW_COPY_AND_ASSIGN(VpxEncoder);
|
| };
|
|
|
| +class H264Encoder final : public VideoTrackRecorder::Encoder {
|
| + public:
|
| + static void ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread,
|
| +                              std::unique_ptr<ISVCEncoder> encoder);
|
| +
|
| + H264Encoder(
|
| + const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
|
| + int32_t bits_per_second);
|
| +
|
| + void StartFrameEncode(const scoped_refptr<VideoFrame>& frame,
|
| + base::TimeTicks capture_timestamp) override;
|
| +
|
| + void set_paused(bool paused) override { paused_ = paused; }
|
| +
|
| + private:
|
| + ~H264Encoder() override;
|
| +
|
| + void EncodeOnEncodingThread(const scoped_refptr<VideoFrame>& frame,
|
| + base::TimeTicks capture_timestamp);
|
| +
|
| + void ConfigureEncoding(const gfx::Size& size);
|
| +
|
| + // While |paused_|, frames are not encoded.
|
| + bool paused_;
|
| +
|
| + // This callback should be exercised on IO thread.
|
| + const VideoTrackRecorder::OnEncodedVideoCB on_encoded_video_callback_;
|
| +
|
| +  // Target bitrate for video encoding. If 0, a standard bitrate is used.
|
| + const int32_t bits_per_second_;
|
| +
|
| +  // Used to shut down properly on the same thread on which we were created.
|
| + const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
|
| +
|
| +  // Task runner where frames to encode are received and reply callbacks run.
|
| + scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_;
|
| +
|
| +  // Thread for encoding. Active for the lifetime of H264Encoder. All variables
|
| + // below this are used in this thread.
|
| + std::unique_ptr<base::Thread> encoding_thread_;
|
| +
|
| +  std::unique_ptr<ISVCEncoder> openh264_encoder_;
|
| + gfx::Size configured_size_;
|
| +
|
| +  // The capture timestamp of the first received frame, used as time origin.
|
| + base::TimeTicks first_frame_timestamp_;
|
| +
|
| + DISALLOW_COPY_AND_ASSIGN(H264Encoder);
|
| +};
|
| +
|
| +// static
|
| +void H264Encoder::ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread,
|
| +                                  std::unique_ptr<ISVCEncoder> encoder) {
|
| + DVLOG(1) << __FUNCTION__;
|
| + DCHECK(encoding_thread->IsRunning());
|
| + encoding_thread->Stop();
|
| +
|
| + if (encoder) {
|
| + const int uninit_ret = encoder->Uninitialize();
|
| + DLOG_IF(ERROR, uninit_ret != 0) << "OpenH264 Uninitialize()";
|
| +
|
| + WelsDestroySVCEncoder(encoder.release());
|
| + }
|
| +}
|
| +
|
| +H264Encoder::H264Encoder(
|
| + const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
|
| + int32_t bits_per_second)
|
| + : paused_(false),
|
| + on_encoded_video_callback_(on_encoded_video_callback),
|
| + bits_per_second_(bits_per_second),
|
| + main_task_runner_(base::MessageLoop::current()->task_runner()),
|
| + encoding_thread_(new base::Thread("EncodingThread")) {
|
| + DCHECK(!on_encoded_video_callback_.is_null());
|
| +
|
| + DCHECK(!encoding_thread_->IsRunning());
|
| + encoding_thread_->Start();
|
| +}
|
| +
|
| +void H264Encoder::StartFrameEncode(const scoped_refptr<VideoFrame>& frame,
|
| + base::TimeTicks capture_timestamp) {
|
| + DVLOG(1) << __FUNCTION__;
|
| + // Cache the thread sending frames on first frame arrival.
|
| + if (!origin_task_runner_.get())
|
| + origin_task_runner_ = base::MessageLoop::current()->task_runner();
|
| + DCHECK(origin_task_runner_->BelongsToCurrentThread());
|
| + if (paused_)
|
| + return;
|
| + encoding_thread_->task_runner()->PostTask(
|
| + FROM_HERE, base::Bind(&H264Encoder::EncodeOnEncodingThread,
|
| + this, frame, capture_timestamp));
|
| +}
|
| +
|
| +
|
| +void H264Encoder::EncodeOnEncodingThread(
|
| + const scoped_refptr<VideoFrame>& video_frame,
|
| + base::TimeTicks capture_timestamp) {
|
| + DVLOG(1) << __FUNCTION__;
|
| + DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread());
|
| +
|
| + if (!(video_frame->format() == media::PIXEL_FORMAT_I420 ||
|
| + video_frame->format() == media::PIXEL_FORMAT_YV12 ||
|
| + video_frame->format() == media::PIXEL_FORMAT_YV12A)) {
|
| + NOTREACHED();
|
| + return;
|
| + }
|
| + scoped_refptr<media::VideoFrame> frame = video_frame;
|
| + // Drop alpha channel since we do not support it yet.
|
| + if (frame->format() == media::PIXEL_FORMAT_YV12A)
|
| + frame = media::WrapAsI420VideoFrame(video_frame);
|
| +
|
| + const gfx::Size frame_size = frame->visible_rect().size();
|
| + if (!openh264_encoder_ || configured_size_ != frame_size) {
|
| + ConfigureEncoding(frame_size);
|
| + first_frame_timestamp_ = capture_timestamp;
|
| + }
|
| +
|
| + DCHECK(openh264_encoder_);
|
| +
|
| + // EncodeFrame input.
|
| + SSourcePicture picture = {};
|
| + picture.iPicWidth = frame_size.width();
|
| + picture.iPicHeight = frame_size.height();
|
| + picture.iColorFormat = EVideoFormatType::videoFormatI420;
|
| + picture.uiTimeStamp =
|
| + (capture_timestamp - first_frame_timestamp_).InMilliseconds();
|
| +  picture.iStride[0] = frame->stride(VideoFrame::kYPlane);
|
| +  picture.iStride[1] = frame->stride(VideoFrame::kUPlane);
|
| +  picture.iStride[2] = frame->stride(VideoFrame::kVPlane);
|
| +  picture.pData[0] = frame->data(VideoFrame::kYPlane);
|
| +  picture.pData[1] = frame->data(VideoFrame::kUPlane);
|
| +  picture.pData[2] = frame->data(VideoFrame::kVPlane);
|
| +
|
| + // EncodeFrame output.
|
| + SFrameBSInfo info;
|
| + memset(&info, 0, sizeof(SFrameBSInfo));
|
| +
|
| + // Encode!
|
| + int enc_ret = openh264_encoder_->EncodeFrame(&picture, &info);
|
| + if (enc_ret != cmResultSuccess) {
|
| + DLOG(ERROR) << "OpenH264 encoding failed, EncodeFrame says " << enc_ret;
|
| + NOTREACHED();
|
| + return;
|
| + }
|
| +
|
| + std::unique_ptr<std::string> data(new std::string);
|
| + const uint8_t kNALStartCode[4] = {0, 0, 0, 1};
|
| + for (int layer = 0; layer < info.iLayerNum; ++layer) {
|
| + const SLayerBSInfo& layerInfo = info.sLayerInfo[layer];
|
| + // Iterate NAL units making up this layer, noting fragments.
|
| + size_t layer_len = 0;
|
| + for (int nal = 0; nal < layerInfo.iNalCount; ++nal) {
|
| +
|
| + DCHECK_GE(layerInfo.pNalLengthInByte[nal], 4);
|
| + DCHECK_EQ(kNALStartCode[0], layerInfo.pBsBuf[layer_len+0]);
|
| + DCHECK_EQ(kNALStartCode[1], layerInfo.pBsBuf[layer_len+1]);
|
| + DCHECK_EQ(kNALStartCode[2], layerInfo.pBsBuf[layer_len+2]);
|
| + DCHECK_EQ(kNALStartCode[3], layerInfo.pBsBuf[layer_len+3]);
|
| +
|
| + layer_len += layerInfo.pNalLengthInByte[nal];
|
| + }
|
| + // Copy the entire layer's data (including start codes).
|
| + data->append(reinterpret_cast<char*>(layerInfo.pBsBuf), layer_len);
|
| + }
|
| +
|
| + const bool is_key_frame = info.eFrameType == videoFrameTypeIDR;
|
| +
|
| + origin_task_runner_->PostTask(
|
| + FROM_HERE,
|
| + base::Bind(OnFrameEncodeCompleted, on_encoded_video_callback_, frame,
|
| + base::Passed(&data), capture_timestamp, is_key_frame));
|
| +}
|
| +
|
| +void H264Encoder::ConfigureEncoding(const gfx::Size& size) {
|
| + DVLOG(1) << __FUNCTION__;
|
| + configured_size_ = size;
|
| +
|
| + ISVCEncoder* temp_encoder = nullptr;
|
| + int result = WelsCreateSVCEncoder(&temp_encoder);
|
| + if (result != 0) {
|
| + DLOG(ERROR) << "Failed to create OpenH264 encoder";
|
| + NOTREACHED();
|
| + return;
|
| + }
|
| + openh264_encoder_.reset(temp_encoder);
|
| +
|
| +#if DCHECK_IS_ON()
|
| + int trace_level = WELS_LOG_DETAIL;
|
| + openh264_encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level);
|
| +#endif
|
| +
|
| + SEncParamExt init_params;
|
| + openh264_encoder_->GetDefaultParams(&init_params);
|
| + init_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
|
| +
|
| + init_params.iPicWidth = size.width();
|
| + init_params.iPicHeight = size.height();
|
| + if (bits_per_second_ > 0) {
|
| + init_params.iRCMode = RC_BITRATE_MODE;
|
| + init_params.iTargetBitrate = bits_per_second_;
|
| + } else {
|
| + init_params.iRCMode = RC_OFF_MODE;
|
| + }
|
| +
|
| +  // Do not saturate CPU utilization just for encoding. On a lower-end system
|
| +  // with only 1 or 2 cores, use only one thread for encoding. On systems with
|
| +  // more cores, allow up to half of the cores, capped at 8 encoding threads.
|
| + init_params.iMultipleThreadIdc =
|
| + std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2);
|
| +
|
| + // The base spatial layer 0 is the only one we use.
|
| + DCHECK_EQ(1, init_params.iSpatialLayerNum);
|
| + init_params.sSpatialLayers[0].iVideoWidth = init_params.iPicWidth;
|
| + init_params.sSpatialLayers[0].iVideoHeight = init_params.iPicHeight;
|
| + init_params.sSpatialLayers[0].iSpatialBitrate = init_params.iTargetBitrate;
|
| +
|
| + result = openh264_encoder_->InitializeExt(&init_params);
|
| + if (result != cmResultSuccess) {
|
| + DLOG(ERROR) << "Failed to initialize OpenH264 encoder";
|
| + NOTREACHED();
|
| + return;
|
| + }
|
| +
|
| + int video_format = EVideoFormatType::videoFormatI420;
|
| + openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT,
|
| + &video_format);
|
| +}
|
| +
|
| +H264Encoder::~H264Encoder() {
|
| + main_task_runner_->PostTask(FROM_HERE,
|
| + base::Bind(&H264Encoder::ShutdownEncoder,
|
| + base::Passed(&encoding_thread_),
|
| + base::Passed(&openh264_encoder_)));
|
| +}
|
| +
|
| +//
|
| +//
|
| // static
|
| -void VideoTrackRecorder::VpxEncoder::ShutdownEncoder(
|
| - std::unique_ptr<base::Thread> encoding_thread,
|
| - ScopedVpxCodecCtxPtr encoder) {
|
| +void VpxEncoder::ShutdownEncoder(std::unique_ptr<base::Thread> encoding_thread,
|
| + ScopedVpxCodecCtxPtr encoder) {
|
| DCHECK(encoding_thread->IsRunning());
|
| encoding_thread->Stop();
|
| // Both |encoding_thread| and |encoder| will be destroyed at end-of-scope.
|
| }
|
|
|
| -VideoTrackRecorder::VpxEncoder::VpxEncoder(
|
| +VpxEncoder::VpxEncoder(
|
| bool use_vp9,
|
| - const OnEncodedVideoCB& on_encoded_video_callback,
|
| + const VideoTrackRecorder::OnEncodedVideoCB& on_encoded_video_callback,
|
| int32_t bits_per_second)
|
| : paused_(false),
|
| use_vp9_(use_vp9),
|
| @@ -165,16 +419,15 @@ VideoTrackRecorder::VpxEncoder::VpxEncoder(
|
| encoding_thread_->Start();
|
| }
|
|
|
| -VideoTrackRecorder::VpxEncoder::~VpxEncoder() {
|
| +VpxEncoder::~VpxEncoder() {
|
| main_task_runner_->PostTask(FROM_HERE,
|
| base::Bind(&VpxEncoder::ShutdownEncoder,
|
| base::Passed(&encoding_thread_),
|
| base::Passed(&encoder_)));
|
| }
|
|
|
| -void VideoTrackRecorder::VpxEncoder::StartFrameEncode(
|
| - const scoped_refptr<VideoFrame>& frame,
|
| - base::TimeTicks capture_timestamp) {
|
| +void VpxEncoder::StartFrameEncode(const scoped_refptr<VideoFrame>& frame,
|
| + base::TimeTicks capture_timestamp) {
|
| // Cache the thread sending frames on first frame arrival.
|
| if (!origin_task_runner_.get())
|
| origin_task_runner_ = base::MessageLoop::current()->task_runner();
|
| @@ -186,11 +439,11 @@ void VideoTrackRecorder::VpxEncoder::StartFrameEncode(
|
| this, frame, capture_timestamp));
|
| }
|
|
|
| -void VideoTrackRecorder::VpxEncoder::EncodeOnEncodingThread(
|
| +void VpxEncoder::EncodeOnEncodingThread(
|
| const scoped_refptr<VideoFrame>& video_frame,
|
| base::TimeTicks capture_timestamp) {
|
| TRACE_EVENT0("video",
|
| - "VideoTrackRecorder::VpxEncoder::EncodeOnEncodingThread");
|
| + "VpxEncoder::EncodeOnEncodingThread");
|
| DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread());
|
|
|
| if (!(video_frame->format() == media::PIXEL_FORMAT_I420 ||
|
| @@ -259,7 +512,7 @@ void VideoTrackRecorder::VpxEncoder::EncodeOnEncodingThread(
|
| keyframe));
|
| }
|
|
|
| -void VideoTrackRecorder::VpxEncoder::ConfigureEncoding(const gfx::Size& size) {
|
| +void VpxEncoder::ConfigureEncoding(const gfx::Size& size) {
|
| if (IsInitialized()) {
|
| // TODO(mcasas) VP8 quirk/optimisation: If the new |size| is strictly less-
|
| // than-or-equal than the old size, in terms of area, the existing encoder
|
| @@ -351,12 +604,12 @@ void VideoTrackRecorder::VpxEncoder::ConfigureEncoding(const gfx::Size& size) {
|
| }
|
| }
|
|
|
| -bool VideoTrackRecorder::VpxEncoder::IsInitialized() const {
|
| +bool VpxEncoder::IsInitialized() const {
|
| DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread());
|
| return codec_config_.g_timebase.den != 0;
|
| }
|
|
|
| -base::TimeDelta VideoTrackRecorder::VpxEncoder::CalculateFrameDuration(
|
| +base::TimeDelta VpxEncoder::CalculateFrameDuration(
|
| const scoped_refptr<VideoFrame>& frame) {
|
| DCHECK(encoding_thread_->task_runner()->BelongsToCurrentThread());
|
|
|
| @@ -381,22 +634,35 @@ base::TimeDelta VideoTrackRecorder::VpxEncoder::CalculateFrameDuration(
|
| kMinFrameDuration));
|
| }
|
|
|
| +} // anonymous namespace
|
| +
|
| VideoTrackRecorder::VideoTrackRecorder(
|
| - bool use_vp9,
|
| + CodecId codec,
|
| const blink::WebMediaStreamTrack& track,
|
| const OnEncodedVideoCB& on_encoded_video_callback,
|
| int32_t bits_per_second)
|
| - : track_(track),
|
| - encoder_(
|
| - new VpxEncoder(use_vp9, on_encoded_video_callback, bits_per_second)) {
|
| + : track_(track) {
|
| DCHECK(main_render_thread_checker_.CalledOnValidThread());
|
| DCHECK(!track_.isNull());
|
| DCHECK(track_.getExtraData());
|
|
|
| + switch (codec) {
|
| + case CodecId::H264:
|
| + encoder_ = new H264Encoder(on_encoded_video_callback, bits_per_second);
|
| + break;
|
| + case CodecId::VP8:
|
| + case CodecId::VP9:
|
| + encoder_ = new VpxEncoder(codec == CodecId::VP9,
|
| + on_encoded_video_callback, bits_per_second);
|
| + break;
|
| + default:
|
| + NOTREACHED();
|
| + }
|
| +
|
| // StartFrameEncode() will be called on Render IO thread.
|
| MediaStreamVideoSink::ConnectToTrack(
|
| track_,
|
| - base::Bind(&VideoTrackRecorder::VpxEncoder::StartFrameEncode, encoder_));
|
| + base::Bind(&VideoTrackRecorder::Encoder::StartFrameEncode, encoder_));
|
| }
|
|
|
| VideoTrackRecorder::~VideoTrackRecorder() {
|
|
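Note (not part of the patch): ShutdownEncoder() tears the OpenH264 encoder down by hand, calling Uninitialize() and then WelsDestroySVCEncoder() on the released pointer. Below is a minimal sketch of how that teardown could be folded into the smart pointer itself, assuming only the OpenH264 headers already included above; the ISVCEncoderDeleter name and the ScopedISVCEncoderPtr alias are illustrative and not part of this change.

#include <memory>

#include "third_party/openh264/src/codec/api/svc/codec_api.h"

// Deleter running the same teardown sequence ShutdownEncoder() performs
// manually: release internal encoder state, then destroy the instance.
struct ISVCEncoderDeleter {
  void operator()(ISVCEncoder* encoder) const {
    if (!encoder)
      return;
    encoder->Uninitialize();
    WelsDestroySVCEncoder(encoder);
  }
};

using ScopedISVCEncoderPtr = std::unique_ptr<ISVCEncoder, ISVCEncoderDeleter>;

// Usage sketch: wrap the raw pointer right after creation so every exit path
// (including a failed InitializeExt()) destroys the encoder the same way.
//   ISVCEncoder* raw_encoder = nullptr;
//   if (WelsCreateSVCEncoder(&raw_encoder) != 0)
//     return;
//   ScopedISVCEncoderPtr encoder(raw_encoder);

With such a deleter, the reset() in ConfigureEncoding() would also release any previously configured encoder through the OpenH264 teardown path rather than a plain delete.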
|