| OLD | NEW |
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | 5 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" |
| 6 | 6 |
| 7 #include "base/bind.h" | 7 #include "base/bind.h" |
| 8 #include "base/memory/aligned_memory.h" | 8 #include "base/memory/aligned_memory.h" |
| 9 #include "base/memory/ref_counted.h" |
| 10 #include "base/synchronization/waitable_event.h" |
| 9 #include "base/trace_event/trace_event.h" | 11 #include "base/trace_event/trace_event.h" |
| 12 #include "base/threading/thread_task_runner_handle.h" |
| 13 #include "content/common/gpu/client/context_provider_command_buffer.h" |
| 10 #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h" | 13 #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h" |
| 14 #include "content/renderer/render_thread_impl.h" |
| 15 #include "media/base/bind_to_current_loop.h" |
| 11 #include "media/base/timestamp_constants.h" | 16 #include "media/base/timestamp_constants.h" |
| 12 #include "media/base/video_util.h" | 17 #include "media/base/video_util.h" |
| 18 #include "media/renderers/skcanvas_video_renderer.h" |
| 19 #include "skia/ext/platform_canvas.h" |
| 20 #include "third_party/libyuv/include/libyuv/convert.h" |
| 13 #include "third_party/libyuv/include/libyuv/convert_from.h" | 21 #include "third_party/libyuv/include/libyuv/convert_from.h" |
| 14 #include "third_party/libyuv/include/libyuv/scale.h" | 22 #include "third_party/libyuv/include/libyuv/scale.h" |
| 23 #include "third_party/skia/include/core/SkSurface.h" |
| 15 #include "third_party/webrtc/common_video/include/video_frame_buffer.h" | 24 #include "third_party/webrtc/common_video/include/video_frame_buffer.h" |
| 16 #include "third_party/webrtc/common_video/rotation.h" | 25 #include "third_party/webrtc/common_video/rotation.h" |
| 17 #include "third_party/webrtc/media/engine/webrtcvideoframe.h" | 26 #include "third_party/webrtc/media/engine/webrtcvideoframe.h" |
| 18 | 27 |
| 19 namespace content { | 28 namespace content { |
| 29 |
| 20 namespace { | 30 namespace { |
| 21 | 31 |
| 22 // Empty method used for keeping a reference to the original media::VideoFrame. | 32 // Empty method used for keeping a reference to the original media::VideoFrame. |
| 23 // The reference to |frame| is kept in the closure that calls this method. | 33 // The reference to |frame| is kept in the closure that calls this method. |
| 24 void ReleaseOriginalFrame(const scoped_refptr<media::VideoFrame>& frame) { | 34 void ReleaseOriginalFrame(const scoped_refptr<media::VideoFrame>& frame) { |
| 25 } | 35 } |
| 26 | 36 |
| 49 |
| 27 } // anonymous namespace | 50 } // anonymous namespace |
| 28 | 51 |
| 52 // Initializes the GL context environment and provides a method for copying |
| 53 // texture backed frames into CPU mappable memory. |
| 54 // The class is created and destroyed on the main render thread. |
| 55 class WebRtcVideoCapturerAdapter::TextureFrameCopier |
| 56 : public base::RefCounted<WebRtcVideoCapturerAdapter::TextureFrameCopier> { |
| 57 public: |
| 58 TextureFrameCopier() |
| 59 : main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
| 60 canvas_video_renderer_(new media::SkCanvasVideoRenderer) { |
| 61 RenderThreadImpl* const main_thread = RenderThreadImpl::current(); |
| 62 if (main_thread) |
| 63 provider_ = main_thread->SharedMainThreadContextProvider(); |
| 64 } |
| 65 |
| 66 |
| 67 void CopyTextureFrameOnSomeThread( |
| 68 const scoped_refptr<media::VideoFrame>& frame, |
| 69 base::Callback<void(const scoped_refptr<media::VideoFrame>&)> |
| 70 done_callback) { |
| 71 main_thread_task_runner_->PostTask( |
| 72 FROM_HERE, base::Bind(&TextureFrameCopier::CopyTextureFrameOnMainThread, |
| 73 this, frame, done_callback)); |
| 74 } |
| 75 |
| 76 void CopyTextureFrameOnMainThread( |
| 77 const scoped_refptr<media::VideoFrame>& frame, |
| 78 base::Callback<void(const scoped_refptr<media::VideoFrame>&)> |
| 79 done_callback) { |
| 80 DVLOG(3) << __func__; |
| 81 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
| 82 DCHECK(frame->format() == media::PIXEL_FORMAT_ARGB || |
| 83 frame->format() == media::PIXEL_FORMAT_XRGB || |
| 84 frame->format() == media::PIXEL_FORMAT_I420 || |
| 85 frame->format() == media::PIXEL_FORMAT_UYVY || |
| 86 frame->format() == media::PIXEL_FORMAT_NV12); |
| 87 sk_sp<SkSurface> surface = SkSurface::MakeRasterN32Premul( |
| 88 frame->visible_rect().width(), frame->visible_rect().height()); |
| 89 scoped_refptr<media::VideoFrame> new_frame; |
| 90 if (!surface || !provider_) { |
| 91 // Return a black frame (yuv = {0, 0x80, 0x80}). |
| 92 new_frame = media::VideoFrame::CreateColorFrame( |
| 93 frame->visible_rect().size(), 0u, 0x80, 0x80, frame->timestamp()); |
| 94 done_callback.Run(new_frame); |
| 95 return; |
| 96 } |
| 97 |
| 98 new_frame = media::VideoFrame::CreateFrame( |
| 99 media::PIXEL_FORMAT_I420, frame->coded_size(), frame->visible_rect(), |
| 100 frame->natural_size(), frame->timestamp()); |
| 101 DCHECK(provider_->ContextGL()); |
| 102 canvas_video_renderer_->Copy( |
| 103 frame.get(), surface->getCanvas(), |
| 104 media::Context3D(provider_->ContextGL(), provider_->GrContext())); |
| 105 |
| 106 SkPixmap pixmap; |
| 107 const bool result = surface->getCanvas()->peekPixels(&pixmap); |
| 108 DCHECK(result) << "Error trying to access SkSurface's pixels"; |
| 109 const uint32_t source_pixel_format = |
| 110 (kN32_SkColorType == kRGBA_8888_SkColorType) ? cricket::FOURCC_ABGR |
| 111 : cricket::FOURCC_ARGB; |
| 112 libyuv::ConvertToI420( |
| 113 static_cast<const uint8_t*>(pixmap.addr(0, 0)), pixmap.getSafeSize64(), |
| 114 (new_frame)->visible_data(media::VideoFrame::kYPlane), |
| 115 (new_frame)->stride(media::VideoFrame::kYPlane), |
| 116 (new_frame)->visible_data(media::VideoFrame::kUPlane), |
| 117 (new_frame)->stride(media::VideoFrame::kUPlane), |
| 118 (new_frame)->visible_data(media::VideoFrame::kVPlane), |
| 119 (new_frame)->stride(media::VideoFrame::kVPlane), 0 /* crop_x */, |
| 120 0 /* crop_y */, pixmap.width(), pixmap.height(), |
| 121 (new_frame)->visible_rect().width(), |
| 122 (new_frame)->visible_rect().height(), libyuv::kRotate0, |
| 123 source_pixel_format); |
| 124 |
| 125 done_callback.Run(new_frame); |
| 126 } |
| 127 |
| 128 private: |
| 129 friend class base::RefCounted<TextureFrameCopier>; |
| 130 ~TextureFrameCopier() { |
| 131 // |canvas_video_renderer_| should be deleted on the thread it was created. |
| 132 main_thread_task_runner_->DeleteSoon(FROM_HERE, |
| 133 canvas_video_renderer_.release()); |
| 134 } |
| 135 |
| 136 const scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner_; |
| 137 scoped_refptr<ContextProviderCommandBuffer> provider_; |
| 138 std::unique_ptr<media::SkCanvasVideoRenderer> canvas_video_renderer_; |
| 139 }; |
| 140 |
| 29 WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) | 141 WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) |
| 30 : is_screencast_(is_screencast), | 142 : texture_copier_(new WebRtcVideoCapturerAdapter::TextureFrameCopier()), |
| 143 is_screencast_(is_screencast), |
| 31 running_(false) { | 144 running_(false) { |
| 32 thread_checker_.DetachFromThread(); | 145 thread_checker_.DetachFromThread(); |
| 33 } | 146 } |
| 34 | 147 |
| 35 WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { | 148 WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { |
| 36 DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor"; | 149 DVLOG(3) << __func__; |
| 37 } | 150 } |
| 38 | 151 |
| 39 cricket::CaptureState WebRtcVideoCapturerAdapter::Start( | 152 void WebRtcVideoCapturerAdapter::OnFrameCopied( |
| 40 const cricket::VideoFormat& capture_format) { | 153 int64_t translated_camera_time_us, |
| 154 const scoped_refptr<media::VideoFrame>& frame) { |
| 41 DCHECK(thread_checker_.CalledOnValidThread()); | 155 DCHECK(thread_checker_.CalledOnValidThread()); |
| 42 DCHECK(!running_); | 156 WebRtcVideoFrameAdapter::CopyTextureFrameCallback copy_texture_callback = |
| 43 DVLOG(3) << " WebRtcVideoCapturerAdapter::Start w = " << capture_format.width | 157 base::Bind(&TextureFrameCopier::CopyTextureFrameOnSomeThread, |
| 44 << " h = " << capture_format.height; | 158 OnFrame(cricket::WebRtcVideoFrame( |
| 45 | 159 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>( |
| 46 running_ = true; | 160 frame, copy_texture_callback), |
| 47 return cricket::CS_RUNNING; | 161 webrtc::kVideoRotation_0, translated_camera_time_us), |
| 48 } | 162 frame->natural_size().width(), frame->natural_size().height()); |
| 49 | |
| 50 void WebRtcVideoCapturerAdapter::Stop() { | |
| 51 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 52 DVLOG(3) << " WebRtcVideoCapturerAdapter::Stop "; | |
| 53 DCHECK(running_); | |
| 54 running_ = false; | |
| 55 SetCaptureFormat(NULL); | |
| 56 SignalStateChange(this, cricket::CS_STOPPED); | |
| 57 } | |
| 58 | |
| 59 bool WebRtcVideoCapturerAdapter::IsRunning() { | |
| 60 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 61 return running_; | |
| 62 } | |
| 63 | |
| 64 bool WebRtcVideoCapturerAdapter::GetPreferredFourccs( | |
| 65 std::vector<uint32_t>* fourccs) { | |
| 66 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 67 DCHECK(!fourccs || fourccs->empty()); | |
| 68 if (fourccs) | |
| 69 fourccs->push_back(cricket::FOURCC_I420); | |
| 70 return fourccs != NULL; | |
| 71 } | |
| 72 | |
| 73 bool WebRtcVideoCapturerAdapter::IsScreencast() const { | |
| 74 return is_screencast_; | |
| 75 } | |
| 76 | |
| 77 bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat( | |
| 78 const cricket::VideoFormat& desired, | |
| 79 cricket::VideoFormat* best_format) { | |
| 80 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 81 DVLOG(3) << " GetBestCaptureFormat:: " | |
| 82 << " w = " << desired.width | |
| 83 << " h = " << desired.height; | |
| 84 | |
| 85 // Capability enumeration is done in MediaStreamVideoSource. The adapter can | |
| 86 // just use what is provided. | |
| 87 // Use the desired format as the best format. | |
| 88 best_format->width = desired.width; | |
| 89 best_format->height = desired.height; | |
| 90 best_format->fourcc = cricket::FOURCC_I420; | |
| 91 best_format->interval = desired.interval; | |
| 92 return true; | |
| 93 } | 163 } |
| 94 | 164 |
| 95 void WebRtcVideoCapturerAdapter::OnFrameCaptured( | 165 void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
| 96 const scoped_refptr<media::VideoFrame>& input_frame) { | 166 const scoped_refptr<media::VideoFrame>& input_frame) { |
| 97 DCHECK(thread_checker_.CalledOnValidThread()); | 167 DCHECK(thread_checker_.CalledOnValidThread()); |
| 98 TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured"); | 168 TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured"); |
| 99 if (!(input_frame->IsMappable() && | 169 if (!(input_frame->IsMappable() && |
| 100 (input_frame->format() == media::PIXEL_FORMAT_I420 || | 170 (input_frame->format() == media::PIXEL_FORMAT_I420 || |
| 101 input_frame->format() == media::PIXEL_FORMAT_YV12 || | 171 input_frame->format() == media::PIXEL_FORMAT_YV12 || |
| 102 input_frame->format() == media::PIXEL_FORMAT_YV12A))) { | 172 input_frame->format() == media::PIXEL_FORMAT_YV12A)) && |
| 173 !input_frame->HasTextures()) { |
| 103 // Since connecting sources and sinks do not check the format, we need to | 174 // Since connecting sources and sinks do not check the format, we need to |
| 104 // just ignore formats that we can not handle. | 175 // just ignore formats that we can not handle. |
| 176 LOG(ERROR) << "We cannot send frame with storage type: " |
| 177 << input_frame->storage_type() << " format: " |
| 178 << media::VideoPixelFormatToString(input_frame->format()); |
| 105 NOTREACHED(); | 179 NOTREACHED(); |
| 106 return; | 180 return; |
| 107 } | 181 } |
| 108 scoped_refptr<media::VideoFrame> frame = input_frame; | 182 scoped_refptr<media::VideoFrame> frame = input_frame; |
| 109 // Drop alpha channel since we do not support it yet. | 183 // Drop alpha channel since we do not support it yet. |
| 110 if (frame->format() == media::PIXEL_FORMAT_YV12A) | 184 if (frame->format() == media::PIXEL_FORMAT_YV12A) |
| 111 frame = media::WrapAsI420VideoFrame(input_frame); | 185 frame = media::WrapAsI420VideoFrame(input_frame); |
| 112 | 186 |
| 113 const int orig_width = frame->natural_size().width(); | 187 const int orig_width = frame->natural_size().width(); |
| 114 const int orig_height = frame->natural_size().height(); | 188 const int orig_height = frame->natural_size().height(); |
| 115 int adapted_width; | 189 int adapted_width; |
| 116 int adapted_height; | 190 int adapted_height; |
| 117 // The VideoAdapter is only used for cpu-adaptation downscaling, no | 191 // The VideoAdapter is only used for cpu-adaptation downscaling, no |
| 118 // aspect changes. So we ignore these crop-related outputs. | 192 // aspect changes. So we ignore these crop-related outputs. |
| 119 int crop_width; | 193 int crop_width; |
| 120 int crop_height; | 194 int crop_height; |
| 121 int crop_x; | 195 int crop_x; |
| 122 int crop_y; | 196 int crop_y; |
| 123 int64_t translated_camera_time_us; | 197 int64_t translated_camera_time_us; |
| 124 | 198 |
| 125 if (!AdaptFrame(orig_width, orig_height, | 199 if (!AdaptFrame(orig_width, orig_height, |
| 126 frame->timestamp().InMicroseconds(), | 200 frame->timestamp().InMicroseconds(), |
| 127 rtc::TimeMicros(), | 201 rtc::TimeMicros(), |
| 128 &adapted_width, &adapted_height, | 202 &adapted_width, &adapted_height, |
| 129 &crop_width, &crop_height, &crop_x, &crop_y, | 203 &crop_width, &crop_height, &crop_x, &crop_y, |
| 130 &translated_camera_time_us)) { | 204 &translated_camera_time_us)) { |
| 131 return; | 205 return; |
| 132 } | 206 } |
| 133 | 207 |
| 208 WebRtcVideoFrameAdapter::CopyTextureFrameCallback copy_texture_callback = |
| 209 base::Bind(&TextureFrameCopier::CopyTextureFrameOnSomeThread, |
| 134 // Return |frame| directly if it is texture backed, because there is no | 210 // Return |frame| directly if it is texture backed, because there is no |
| 135 // cropping support for texture yet. See http://crbug/503653. | 211 // cropping support for texture yet. See http://crbug/503653. |
| 136 // Return |frame| directly if it is GpuMemoryBuffer backed, as we want to | |
| 137 // keep the frame on native buffers. | |
| 138 if (frame->HasTextures()) { | 212 if (frame->HasTextures()) { |
| 139 OnFrame(cricket::WebRtcVideoFrame( | 213 texture_copier_->CopyTextureFrameOnSomeThread( |
| 140 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame), | 214 input_frame, media::BindToCurrentLoop(base::Bind( |
| 141 webrtc::kVideoRotation_0, translated_camera_time_us), | 215 &WebRtcVideoCapturerAdapter::OnFrameCopied, |
| 142 orig_width, orig_height); | 216 base::Unretained(this), translated_camera_time_us))); |
| 143 return; | 222 return; |
| 144 } | 223 } |
| 145 | 224 |
| 146 // Translate crop rectangle from natural size to visible size. | 225 // Translate crop rectangle from natural size to visible size. |
| 147 gfx::Rect cropped_visible_rect( | 226 gfx::Rect cropped_visible_rect( |
| 148 frame->visible_rect().x() + | 227 frame->visible_rect().x() + |
| 149 crop_x * frame->visible_rect().width() / orig_width, | 228 crop_x * frame->visible_rect().width() / orig_width, |
| 150 frame->visible_rect().y() + | 229 frame->visible_rect().y() + |
| 151 crop_y * frame->visible_rect().height() / orig_height, | 230 crop_y * frame->visible_rect().height() / orig_height, |
| 152 crop_width * frame->visible_rect().width() / orig_width, | 231 crop_width * frame->visible_rect().width() / orig_width, |
| 153 crop_height * frame->visible_rect().height() / orig_height); | 232 crop_height * frame->visible_rect().height() / orig_height); |
| 154 | 233 |
| 155 const gfx::Size adapted_size(adapted_width, adapted_height); | 234 const gfx::Size adapted_size(adapted_width, adapted_height); |
| 156 scoped_refptr<media::VideoFrame> video_frame = | 235 scoped_refptr<media::VideoFrame> video_frame = |
| 157 media::VideoFrame::WrapVideoFrame(frame, frame->format(), | 236 media::VideoFrame::WrapVideoFrame(frame, frame->format(), |
| 158 cropped_visible_rect, adapted_size); | 237 cropped_visible_rect, adapted_size); |
| 159 if (!video_frame) | 238 if (!video_frame) |
| 160 return; | 239 return; |
| 161 | 240 |
| 162 video_frame->AddDestructionObserver(base::Bind(&ReleaseOriginalFrame, frame)); | 241 video_frame->AddDestructionObserver(base::Bind(&ReleaseOriginalFrame, frame)); |
| 163 | 242 |
| 164 // If no scaling is needed, return a wrapped version of |frame| directly. | 243 // If no scaling is needed, return a wrapped version of |frame| directly. |
| 165 if (video_frame->natural_size() == video_frame->visible_rect().size()) { | 244 if (video_frame->natural_size() == video_frame->visible_rect().size()) { |
| 166 OnFrame(cricket::WebRtcVideoFrame( | 245 OnFrame(cricket::WebRtcVideoFrame( |
| 167 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(video_frame), | 246 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>( |
| 247 video_frame, copy_texture_callback), |
| 168 webrtc::kVideoRotation_0, translated_camera_time_us), | 248 webrtc::kVideoRotation_0, translated_camera_time_us), |
| 169 orig_width, orig_height); | 249 orig_width, orig_height); |
| 170 return; | 250 return; |
| 171 } | 251 } |
| 172 | 252 |
| 173 // We need to scale the frame before we hand it over to webrtc. | 253 // We need to scale the frame before we hand it over to webrtc. |
| 174 scoped_refptr<media::VideoFrame> scaled_frame = | 254 scoped_refptr<media::VideoFrame> scaled_frame = |
| 175 scaled_frame_pool_.CreateFrame(media::PIXEL_FORMAT_I420, adapted_size, | 255 scaled_frame_pool_.CreateFrame(media::PIXEL_FORMAT_I420, adapted_size, |
| 176 gfx::Rect(adapted_size), adapted_size, | 256 gfx::Rect(adapted_size), adapted_size, |
| 177 frame->timestamp()); | 257 frame->timestamp()); |
| 178 libyuv::I420Scale(video_frame->visible_data(media::VideoFrame::kYPlane), | 258 libyuv::I420Scale(video_frame->visible_data(media::VideoFrame::kYPlane), |
| 179 video_frame->stride(media::VideoFrame::kYPlane), | 259 video_frame->stride(media::VideoFrame::kYPlane), |
| 180 video_frame->visible_data(media::VideoFrame::kUPlane), | 260 video_frame->visible_data(media::VideoFrame::kUPlane), |
| 181 video_frame->stride(media::VideoFrame::kUPlane), | 261 video_frame->stride(media::VideoFrame::kUPlane), |
| 182 video_frame->visible_data(media::VideoFrame::kVPlane), | 262 video_frame->visible_data(media::VideoFrame::kVPlane), |
| 183 video_frame->stride(media::VideoFrame::kVPlane), | 263 video_frame->stride(media::VideoFrame::kVPlane), |
| 184 video_frame->visible_rect().width(), | 264 video_frame->visible_rect().width(), |
| 185 video_frame->visible_rect().height(), | 265 video_frame->visible_rect().height(), |
| 186 scaled_frame->data(media::VideoFrame::kYPlane), | 266 scaled_frame->data(media::VideoFrame::kYPlane), |
| 187 scaled_frame->stride(media::VideoFrame::kYPlane), | 267 scaled_frame->stride(media::VideoFrame::kYPlane), |
| 188 scaled_frame->data(media::VideoFrame::kUPlane), | 268 scaled_frame->data(media::VideoFrame::kUPlane), |
| 189 scaled_frame->stride(media::VideoFrame::kUPlane), | 269 scaled_frame->stride(media::VideoFrame::kUPlane), |
| 190 scaled_frame->data(media::VideoFrame::kVPlane), | 270 scaled_frame->data(media::VideoFrame::kVPlane), |
| 191 scaled_frame->stride(media::VideoFrame::kVPlane), | 271 scaled_frame->stride(media::VideoFrame::kVPlane), |
| 192 adapted_width, adapted_height, libyuv::kFilterBilinear); | 272 adapted_width, adapted_height, libyuv::kFilterBilinear); |
| 193 | 273 |
| 194 OnFrame(cricket::WebRtcVideoFrame( | 274 OnFrame(cricket::WebRtcVideoFrame( |
| 195 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(scaled_frame), | 275 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>( |
| 276 scaled_frame, copy_texture_callback), |
| 196 webrtc::kVideoRotation_0, translated_camera_time_us), | 277 webrtc::kVideoRotation_0, translated_camera_time_us), |
| 197 orig_width, orig_height); | 278 orig_width, orig_height); |
| 198 } | 279 } |
| 199 | 280 |
| 281 cricket::CaptureState WebRtcVideoCapturerAdapter::Start( |
| 282 const cricket::VideoFormat& capture_format) { |
| 283 DCHECK(thread_checker_.CalledOnValidThread()); |
| 284 DCHECK(!running_); |
| 285 DVLOG(3) << __func__ << " capture format: " << capture_format.ToString(); |
| 286 |
| 287 running_ = true; |
| 288 return cricket::CS_RUNNING; |
| 289 } |
| 290 |
| 291 void WebRtcVideoCapturerAdapter::Stop() { |
| 292 DCHECK(thread_checker_.CalledOnValidThread()); |
| 293 DVLOG(3) << __func__; |
| 294 DCHECK(running_); |
| 295 running_ = false; |
| 296 SetCaptureFormat(NULL); |
| 297 SignalStateChange(this, cricket::CS_STOPPED); |
| 298 } |
| 299 |
| 300 bool WebRtcVideoCapturerAdapter::IsRunning() { |
| 301 DCHECK(thread_checker_.CalledOnValidThread()); |
| 302 return running_; |
| 303 } |
| 304 |
| 305 bool WebRtcVideoCapturerAdapter::GetPreferredFourccs( |
| 306 std::vector<uint32_t>* fourccs) { |
| 307 DCHECK(thread_checker_.CalledOnValidThread()); |
| 308 if (!fourccs) |
| 309 return false; |
| 310 DCHECK(fourccs->empty()); |
| 311 fourccs->push_back(cricket::FOURCC_I420); |
| 312 return true; |
| 313 } |
| 314 |
| 315 bool WebRtcVideoCapturerAdapter::IsScreencast() const { |
| 316 return is_screencast_; |
| 317 } |
| 318 |
| 319 bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat( |
| 320 const cricket::VideoFormat& desired, |
| 321 cricket::VideoFormat* best_format) { |
| 322 DCHECK(thread_checker_.CalledOnValidThread()); |
| 323 DVLOG(3) << __func__ << " desired: " << desired.ToString(); |
| 324 |
| 325 // Capability enumeration is done in MediaStreamVideoSource. The adapter can |
| 326 // just use what is provided. |
| 327 // Use the desired format as the best format. |
| 328 best_format->width = desired.width; |
| 329 best_format->height = desired.height; |
| 330 best_format->fourcc = cricket::FOURCC_I420; |
| 331 best_format->interval = desired.interval; |
| 332 return true; |
| 333 } |
| 334 |
| 200 } // namespace content | 335 } // namespace content |
| OLD | NEW |