Chromium Code Reviews
| Index: content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
| diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
| index 96b6fe32af607570b192f469204a6f3298979e64..901ccb3463dcd54d8145cb773cc40def185def22 100644 |
| --- a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
| +++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
| @@ -6,17 +6,26 @@ |
| #include "base/bind.h" |
| #include "base/memory/aligned_memory.h" |
| +#include "base/memory/ref_counted.h" |
| +#include "base/synchronization/waitable_event.h" |
| #include "base/trace_event/trace_event.h" |
| +#include "content/common/gpu/client/context_provider_command_buffer.h" |
| #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h" |
| +#include "content/renderer/render_thread_impl.h" |
| #include "media/base/timestamp_constants.h" |
| #include "media/base/video_util.h" |
| +#include "media/renderers/skcanvas_video_renderer.h" |
| +#include "skia/ext/platform_canvas.h" |
| +#include "third_party/libyuv/include/libyuv/convert.h" |
| #include "third_party/libyuv/include/libyuv/convert_from.h" |
| #include "third_party/libyuv/include/libyuv/scale.h" |
| +#include "third_party/skia/include/core/SkSurface.h" |
| #include "third_party/webrtc/common_video/include/video_frame_buffer.h" |
| #include "third_party/webrtc/common_video/rotation.h" |
| #include "third_party/webrtc/media/engine/webrtcvideoframe.h" |
| namespace content { |
| + |
| namespace { |
| // Empty method used for keeping a reference to the original media::VideoFrame. |
| @@ -24,72 +33,127 @@ namespace { |
| void ReleaseOriginalFrame(const scoped_refptr<media::VideoFrame>& frame) { |
| } |
| +// Helper class that signals a WaitableEvent when it goes out of scope. |
| +class ScopedWaitableEvent { |
| + public: |
| + explicit ScopedWaitableEvent(base::WaitableEvent* event) : event_(event) {} |
| + ~ScopedWaitableEvent() { |
| + if (event_) |
| + event_->Signal(); |
| + } |
| + |
| + private: |
| + base::WaitableEvent* const event_; |
| +}; |
| + |
| } // anonymous namespace |
| -WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) |
| - : is_screencast_(is_screencast), |
| - running_(false) { |
| - thread_checker_.DetachFromThread(); |
| -} |
| +// Initializes the GL context environment and provides a method for copying |
| +// texture backed frames into CPU mappable memory. |
| +// The class is created and destroyed on the main render thread. |
| +class WebRtcVideoCapturerAdapter::TextureFrameCopier |
| + : public base::RefCounted<WebRtcVideoCapturerAdapter::TextureFrameCopier> { |
| + public: |
| + TextureFrameCopier() |
| + : main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
| + canvas_video_renderer_(new media::SkCanvasVideoRenderer) { |
| + RenderThreadImpl* const main_thread = RenderThreadImpl::current(); |
| + if (main_thread) |
| + provider_ = main_thread->SharedMainThreadContextProvider(); |
| + } |
| -WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { |
| - DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor"; |
| -} |
| + // Synchronous call to copy a texture backed |frame| into a CPU mappable |
| + // |new_frame|. If it is not called on the main render thread, this call posts |
| + // a task on main thread by calling CopyTextureFrameOnMainThread() and blocks |
| + // until it is completed. |
| + void CopyTextureFrame(const scoped_refptr<media::VideoFrame>& frame, |
| + scoped_refptr<media::VideoFrame>* new_frame) { |
| + if (main_thread_task_runner_->BelongsToCurrentThread()) { |
| + CopyTextureFrameOnMainThread(frame, new_frame, nullptr); |
| + return; |
| + } |
| -cricket::CaptureState WebRtcVideoCapturerAdapter::Start( |
| - const cricket::VideoFormat& capture_format) { |
| - DCHECK(thread_checker_.CalledOnValidThread()); |
| - DCHECK(!running_); |
| - DVLOG(3) << " WebRtcVideoCapturerAdapter::Start w = " << capture_format.width |
| - << " h = " << capture_format.height; |
| + base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::MANUAL, |
| + base::WaitableEvent::InitialState::NOT_SIGNALED); |
| + main_thread_task_runner_->PostTask( |
| + FROM_HERE, base::Bind(&TextureFrameCopier::CopyTextureFrameOnMainThread, |
| + this, frame, new_frame, &waiter)); |
| + waiter.Wait(); |
| + } |
| - running_ = true; |
| - return cricket::CS_RUNNING; |
| -} |
| + private: |
| + friend class base::RefCounted<TextureFrameCopier>; |
| + ~TextureFrameCopier() { |
| + // |canvas_video_renderer_| should be deleted on the thread it was created. |
| + if (!main_thread_task_runner_->BelongsToCurrentThread()) { |
| + main_thread_task_runner_->DeleteSoon(FROM_HERE, |
| + canvas_video_renderer_.release()); |
| + } |
| + } |
| -void WebRtcVideoCapturerAdapter::Stop() { |
| - DCHECK(thread_checker_.CalledOnValidThread()); |
| - DVLOG(3) << " WebRtcVideoCapturerAdapter::Stop "; |
| - DCHECK(running_); |
| - running_ = false; |
| - SetCaptureFormat(NULL); |
| - SignalStateChange(this, cricket::CS_STOPPED); |
| -} |
| + void CopyTextureFrameOnMainThread( |
| + const scoped_refptr<media::VideoFrame>& frame, |
| + scoped_refptr<media::VideoFrame>* new_frame, |
| + base::WaitableEvent* waiter) { |
| + DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
| + DCHECK(frame->format() == media::PIXEL_FORMAT_ARGB || |
| + frame->format() == media::PIXEL_FORMAT_XRGB || |
| + frame->format() == media::PIXEL_FORMAT_I420 || |
| + frame->format() == media::PIXEL_FORMAT_UYVY || |
| + frame->format() == media::PIXEL_FORMAT_NV12); |
| + ScopedWaitableEvent event(waiter); |
| + sk_sp<SkSurface> surface = SkSurface::MakeRasterN32Premul( |
| + frame->visible_rect().width(), frame->visible_rect().height()); |
| -bool WebRtcVideoCapturerAdapter::IsRunning() { |
| - DCHECK(thread_checker_.CalledOnValidThread()); |
| - return running_; |
| -} |
| + if (!surface || !provider_) { |
| + // Return a black frame (yuv = {0, 0x80, 0x80}). |
| + *new_frame = media::VideoFrame::CreateColorFrame( |
| + frame->visible_rect().size(), 0u, 0x80, 0x80, frame->timestamp()); |
| + return; |
| + } |
| -bool WebRtcVideoCapturerAdapter::GetPreferredFourccs( |
| - std::vector<uint32_t>* fourccs) { |
| - DCHECK(thread_checker_.CalledOnValidThread()); |
| - DCHECK(!fourccs || fourccs->empty()); |
| - if (fourccs) |
| - fourccs->push_back(cricket::FOURCC_I420); |
| - return fourccs != NULL; |
| -} |
| + *new_frame = media::VideoFrame::CreateFrame( |
| + media::PIXEL_FORMAT_I420, frame->coded_size(), frame->visible_rect(), |
| + frame->natural_size(), frame->timestamp()); |
| + DCHECK(provider_->ContextGL()); |
| + canvas_video_renderer_->Copy( |
| + frame.get(), surface->getCanvas(), |
| + media::Context3D(provider_->ContextGL(), provider_->GrContext())); |
| -bool WebRtcVideoCapturerAdapter::IsScreencast() const { |
| - return is_screencast_; |
| -} |
| + SkPixmap pixmap; |
| + const bool result = surface->getCanvas()->peekPixels(&pixmap); |
| + DCHECK(result) << "Error trying to access SkSurface's pixels"; |
| + const uint32 source_pixel_format = |
| + (kN32_SkColorType == kRGBA_8888_SkColorType) ? cricket::FOURCC_ABGR |
| + : cricket::FOURCC_ARGB; |
| + libyuv::ConvertToI420( |
| + static_cast<const uint8*>(pixmap.addr(0, 0)), pixmap.getSafeSize64(), |
| + (*new_frame)->visible_data(media::VideoFrame::kYPlane), |
| + (*new_frame)->stride(media::VideoFrame::kYPlane), |
| + (*new_frame)->visible_data(media::VideoFrame::kUPlane), |
| + (*new_frame)->stride(media::VideoFrame::kUPlane), |
| + (*new_frame)->visible_data(media::VideoFrame::kVPlane), |
| + (*new_frame)->stride(media::VideoFrame::kVPlane), 0 /* crop_x */, |
| + 0 /* crop_y */, pixmap.width(), pixmap.height(), |
| + (*new_frame)->visible_rect().width(), |
| + (*new_frame)->visible_rect().height(), libyuv::kRotate0, |
| + source_pixel_format); |
| + } |
| -bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat( |
| - const cricket::VideoFormat& desired, |
| - cricket::VideoFormat* best_format) { |
| - DCHECK(thread_checker_.CalledOnValidThread()); |
| - DVLOG(3) << " GetBestCaptureFormat:: " |
| - << " w = " << desired.width |
| - << " h = " << desired.height; |
| + const scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner_; |
| + scoped_refptr<ContextProviderCommandBuffer> provider_; |
| + std::unique_ptr<media::SkCanvasVideoRenderer> canvas_video_renderer_; |
| +}; |
| - // Capability enumeration is done in MediaStreamVideoSource. The adapter can |
| - // just use what is provided. |
| - // Use the desired format as the best format. |
| - best_format->width = desired.width; |
| - best_format->height = desired.height; |
| - best_format->fourcc = cricket::FOURCC_I420; |
| - best_format->interval = desired.interval; |
| - return true; |
| +WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) |
| + : texture_copier_(new WebRtcVideoCapturerAdapter::TextureFrameCopier()), |
| + is_screencast_(is_screencast), |
| + running_(false) { |
| + thread_checker_.DetachFromThread(); |
| +} |
| + |
| +WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { |
| + DVLOG(3) << __func__; |
| } |
| void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
| @@ -99,9 +163,13 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
| if (!(input_frame->IsMappable() && |
| (input_frame->format() == media::PIXEL_FORMAT_I420 || |
| input_frame->format() == media::PIXEL_FORMAT_YV12 || |
| - input_frame->format() == media::PIXEL_FORMAT_YV12A))) { |
| + input_frame->format() == media::PIXEL_FORMAT_YV12A)) && |
| + !input_frame->HasTextures()) { |
| // Since connecting sources and sinks do not check the format, we need to |
| // just ignore formats that we can not handle. |
| + LOG(ERROR) << "We cannot send frame with storage type: " |
| + << input_frame->storage_type() << " format: " |
| + << media::VideoPixelFormatToString(input_frame->format()); |
|
mcasas
2016/10/28 19:49:35
nit: my suggestion was ToString() the whole thing, e.g. via VideoFrame::AsHumanReadableString().
emircan
2016/10/28 22:08:17
AsHumanReadableString() is awesome. Thanks.
|
| NOTREACHED(); |
| return; |
| } |
| @@ -131,13 +199,14 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
| return; |
| } |
| + WebRtcVideoFrameAdapter::CopyTextureFrameCallback copy_texture_callback = |
| + base::Bind(&TextureFrameCopier::CopyTextureFrame, texture_copier_); |
| // Return |frame| directly if it is texture backed, because there is no |
| // cropping support for texture yet. See http://crbug/503653. |
| - // Return |frame| directly if it is GpuMemoryBuffer backed, as we want to |
| - // keep the frame on native buffers. |
| if (frame->HasTextures()) { |
| OnFrame(cricket::WebRtcVideoFrame( |
| - new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame), |
| + new rtc::RefCountedObject<WebRtcVideoFrameAdapter>( |
| + frame, copy_texture_callback), |
| webrtc::kVideoRotation_0, translated_camera_time_us), |
| orig_width, orig_height); |
| return; |
| @@ -164,7 +233,8 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
| // If no scaling is needed, return a wrapped version of |frame| directly. |
| if (video_frame->natural_size() == video_frame->visible_rect().size()) { |
| OnFrame(cricket::WebRtcVideoFrame( |
| - new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(video_frame), |
| + new rtc::RefCountedObject<WebRtcVideoFrameAdapter>( |
| + video_frame, copy_texture_callback), |
| webrtc::kVideoRotation_0, translated_camera_time_us), |
| orig_width, orig_height); |
| return; |
| @@ -192,9 +262,64 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
| adapted_width, adapted_height, libyuv::kFilterBilinear); |
| OnFrame(cricket::WebRtcVideoFrame( |
| - new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(scaled_frame), |
| + new rtc::RefCountedObject<WebRtcVideoFrameAdapter>( |
| + scaled_frame, copy_texture_callback), |
| webrtc::kVideoRotation_0, translated_camera_time_us), |
|
mcasas
2016/10/28 19:49:35
nit: Since here and in l.237 we know that frame->HasTextures() is false, [rest of comment truncated in this capture]
emircan
2016/10/28 22:08:17
Empty callback makes more sense as reference is held [rest of reply truncated in this capture]
|
| orig_width, orig_height); |
| } |
| +cricket::CaptureState WebRtcVideoCapturerAdapter::Start( |
| + const cricket::VideoFormat& capture_format) { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + DCHECK(!running_); |
| + DVLOG(3) << __func__ << " capture format: " << capture_format.ToString(); |
| + |
| + running_ = true; |
| + return cricket::CS_RUNNING; |
| +} |
| + |
| +void WebRtcVideoCapturerAdapter::Stop() { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + DVLOG(3) << __func__; |
| + DCHECK(running_); |
| + running_ = false; |
| + SetCaptureFormat(NULL); |
| + SignalStateChange(this, cricket::CS_STOPPED); |
| +} |
| + |
| +bool WebRtcVideoCapturerAdapter::IsRunning() { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + return running_; |
| +} |
| + |
| +bool WebRtcVideoCapturerAdapter::GetPreferredFourccs( |
| + std::vector<uint32_t>* fourccs) { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + if (!fourccs) |
| + return false; |
| + DCHECK(fourccs->empty()); |
| + fourccs->push_back(cricket::FOURCC_I420); |
| + return true; |
| +} |
| + |
| +bool WebRtcVideoCapturerAdapter::IsScreencast() const { |
| + return is_screencast_; |
| +} |
| + |
| +bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat( |
| + const cricket::VideoFormat& desired, |
| + cricket::VideoFormat* best_format) { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + DVLOG(3) << __func__ << " desired: " << desired.ToString(); |
| + |
| + // Capability enumeration is done in MediaStreamVideoSource. The adapter can |
| + // just use what is provided. |
| + // Use the desired format as the best format. |
| + best_format->width = desired.width; |
| + best_format->height = desired.height; |
| + best_format->fourcc = cricket::FOURCC_I420; |
| + best_format->interval = desired.interval; |
| + return true; |
| +} |
| + |
| } // namespace content |