OLD | NEW |
---|---|
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | 5 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/memory/aligned_memory.h" | 8 #include "base/memory/aligned_memory.h" |
9 #include "base/trace_event/trace_event.h" | 9 #include "base/trace_event/trace_event.h" |
10 #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h" | 10 #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h" |
(...skipping 53 matching lines...) | |
64 cricket::VideoFrame* CreateAliasedFrame( | 64 cricket::VideoFrame* CreateAliasedFrame( |
65 const cricket::CapturedFrame* input_frame, | 65 const cricket::CapturedFrame* input_frame, |
66 int cropped_input_width, | 66 int cropped_input_width, |
67 int cropped_input_height, | 67 int cropped_input_height, |
68 int output_width, | 68 int output_width, |
69 int output_height) const override { | 69 int output_height) const override { |
70 // Check that captured_frame is actually our frame. | 70 // Check that captured_frame is actually our frame. |
71 DCHECK(input_frame == &captured_frame_); | 71 DCHECK(input_frame == &captured_frame_); |
72 DCHECK(frame_.get()); | 72 DCHECK(frame_.get()); |
73 | 73 |
74 const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() * | |
75 base::Time::kNanosecondsPerMicrosecond; | |
76 | |
77 // Return |frame_| directly if it is texture backed, because there is no | |
78 // cropping support for texture yet. See http://crbug/503653. | |
79 if (frame_->HasTextures()) { | |
80 return new cricket::WebRtcVideoFrame( | |
81 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame_), | |
82 captured_frame_.elapsed_time, timestamp_ns); | |
83 } | |
84 | |
74 // Create a centered cropped visible rect that preservers aspect ratio for | 85 // Create a centered cropped visible rect that preservers aspect ratio for |
75 // cropped natural size. | 86 // cropped natural size. |
76 gfx::Rect visible_rect = frame_->visible_rect(); | 87 gfx::Rect visible_rect = frame_->visible_rect(); |
77 visible_rect.ClampToCenteredSize(gfx::Size( | 88 visible_rect.ClampToCenteredSize(gfx::Size( |
78 visible_rect.width() * cropped_input_width / input_frame->width, | 89 visible_rect.width() * cropped_input_width / input_frame->width, |
79 visible_rect.height() * cropped_input_height / input_frame->height)); | 90 visible_rect.height() * cropped_input_height / input_frame->height)); |
80 | 91 |
81 const gfx::Size output_size(output_width, output_height); | 92 const gfx::Size output_size(output_width, output_height); |
82 scoped_refptr<media::VideoFrame> video_frame = | 93 scoped_refptr<media::VideoFrame> video_frame = |
83 media::VideoFrame::WrapVideoFrame(frame_, visible_rect, output_size); | 94 media::VideoFrame::WrapVideoFrame(frame_, visible_rect, output_size); |
84 video_frame->AddDestructionObserver( | 95 video_frame->AddDestructionObserver( |
85 base::Bind(&ReleaseOriginalFrame, frame_)); | 96 base::Bind(&ReleaseOriginalFrame, frame_)); |
86 | 97 |
87 const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() * | |
88 base::Time::kNanosecondsPerMicrosecond; | |
89 | |
90 // If no scaling is needed, return a wrapped version of |frame_| directly. | 98 // If no scaling is needed, return a wrapped version of |frame_| directly. |
91 if (video_frame->natural_size() == video_frame->visible_rect().size()) { | 99 if (video_frame->natural_size() == video_frame->visible_rect().size()) { |
92 return new cricket::WebRtcVideoFrame( | 100 return new cricket::WebRtcVideoFrame( |
93 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(video_frame), | 101 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(video_frame), |
94 captured_frame_.elapsed_time, timestamp_ns); | 102 captured_frame_.elapsed_time, timestamp_ns); |
95 } | 103 } |
96 | 104 |
97 // We need to scale the frame before we hand it over to cricket. | 105 // We need to scale the frame before we hand it over to cricket. |
98 scoped_refptr<media::VideoFrame> scaled_frame = | 106 scoped_refptr<media::VideoFrame> scaled_frame = |
99 scaled_frame_pool_.CreateFrame(media::VideoFrame::I420, output_size, | 107 scaled_frame_pool_.CreateFrame(media::VideoFrame::I420, output_size, |
(...skipping 30 matching lines...) | |
130 private: | 138 private: |
131 scoped_refptr<media::VideoFrame> frame_; | 139 scoped_refptr<media::VideoFrame> frame_; |
132 cricket::CapturedFrame captured_frame_; | 140 cricket::CapturedFrame captured_frame_; |
133 // This is used only if scaling is needed. | 141 // This is used only if scaling is needed. |
134 mutable media::VideoFramePool scaled_frame_pool_; | 142 mutable media::VideoFramePool scaled_frame_pool_; |
135 }; | 143 }; |
136 | 144 |
137 WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) | 145 WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) |
138 : is_screencast_(is_screencast), | 146 : is_screencast_(is_screencast), |
139 running_(false), | 147 running_(false), |
140 first_frame_timestamp_(media::kNoTimestamp()), | 148 first_frame_timestamp_(media::kNoTimestamp()) { |
mcasas 2015/06/27 01:03:38: Not your problem but kNoTimestamp() is a misleading… | |
141 frame_factory_(new MediaVideoFrameFactory) { | |
142 thread_checker_.DetachFromThread(); | 149 thread_checker_.DetachFromThread(); |
143 // The base class takes ownership of the frame factory. | 150 // The base class takes ownership of the frame factory. |
144 set_frame_factory(frame_factory_); | 151 set_frame_factory(new MediaVideoFrameFactory); |
145 } | 152 } |
146 | 153 |
147 WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { | 154 WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { |
148 DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor"; | 155 DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor"; |
149 } | 156 } |
150 | 157 |
151 cricket::CaptureState WebRtcVideoCapturerAdapter::Start( | 158 cricket::CaptureState WebRtcVideoCapturerAdapter::Start( |
152 const cricket::VideoFormat& capture_format) { | 159 const cricket::VideoFormat& capture_format) { |
153 DCHECK(thread_checker_.CalledOnValidThread()); | 160 DCHECK(thread_checker_.CalledOnValidThread()); |
154 DCHECK(!running_); | 161 DCHECK(!running_); |
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
201 best_format->height = desired.height; | 208 best_format->height = desired.height; |
202 best_format->fourcc = cricket::FOURCC_I420; | 209 best_format->fourcc = cricket::FOURCC_I420; |
203 best_format->interval = desired.interval; | 210 best_format->interval = desired.interval; |
204 return true; | 211 return true; |
205 } | 212 } |
206 | 213 |
207 void WebRtcVideoCapturerAdapter::OnFrameCaptured( | 214 void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
208 const scoped_refptr<media::VideoFrame>& frame) { | 215 const scoped_refptr<media::VideoFrame>& frame) { |
209 DCHECK(thread_checker_.CalledOnValidThread()); | 216 DCHECK(thread_checker_.CalledOnValidThread()); |
210 TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured"); | 217 TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured"); |
211 if (!(media::VideoFrame::I420 == frame->format() || | 218 if (!((frame->IsMappable() && (frame->format() == media::VideoFrame::I420 || |
212 media::VideoFrame::YV12 == frame->format())) { | 219 frame->format() == media::VideoFrame::YV12)) || |
213 // Some types of sources support textures as output. Since connecting | 220 frame->HasTextures())) { |
214 // sources and sinks do not check the format, we need to just ignore | 221 // Since connecting sources and sinks do not check the format, we need to |
215 // formats that we can not handle. | 222 // just ignore formats that we can not handle. |
216 NOTREACHED(); | 223 NOTREACHED(); |
217 return; | 224 return; |
218 } | 225 } |
219 | 226 |
220 if (first_frame_timestamp_ == media::kNoTimestamp()) | 227 if (first_frame_timestamp_ == media::kNoTimestamp()) |
221 first_frame_timestamp_ = frame->timestamp(); | 228 first_frame_timestamp_ = frame->timestamp(); |
222 | 229 |
223 const int64 elapsed_time = | 230 const int64 elapsed_time = |
224 (frame->timestamp() - first_frame_timestamp_).InMicroseconds() * | 231 (frame->timestamp() - first_frame_timestamp_).InMicroseconds() * |
225 base::Time::kNanosecondsPerMicrosecond; | 232 base::Time::kNanosecondsPerMicrosecond; |
226 | 233 |
227 // Inject the frame via the VideoFrameFractory. | 234 // Inject the frame via the VideoFrameFactory of base class. |
228 DCHECK(frame_factory_ == frame_factory()); | 235 MediaVideoFrameFactory* media_video_frame_factory = |
mcasas 2015/06/27 01:03:38: MediaVideoFrameFactory* const ? :-) | |
229 frame_factory_->SetFrame(frame, elapsed_time); | 236 reinterpret_cast<MediaVideoFrameFactory*>(frame_factory()); |
237 media_video_frame_factory->SetFrame(frame, elapsed_time); | |
230 | 238 |
231 // This signals to libJingle that a new VideoFrame is available. | 239 // This signals to libJingle that a new VideoFrame is available. |
232 SignalFrameCaptured(this, frame_factory_->GetCapturedFrame()); | 240 SignalFrameCaptured(this, media_video_frame_factory->GetCapturedFrame()); |
233 | 241 |
234 frame_factory_->ReleaseFrame(); // Release the frame ASAP. | 242 media_video_frame_factory->ReleaseFrame(); // Release the frame ASAP. |
235 } | 243 } |
236 | 244 |
237 } // namespace content | 245 } // namespace content |
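
For readers skimming the diff above: on the CPU path, `CreateAliasedFrame` first shrinks the source frame's visible rect to a centered region whose aspect ratio matches the requested `cropped_input_width` x `cropped_input_height`, using `gfx::Rect::ClampToCenteredSize`. The standalone sketch below reproduces just that arithmetic with plain structs; `Rect`, the helper function, and the example numbers are illustrative stand-ins, not Chromium's `gfx` types.

```cpp
// Standalone sketch (not Chromium code): the centered, aspect-preserving crop
// that CreateAliasedFrame computes. Types and helpers here are illustrative.
#include <algorithm>
#include <cstdio>

struct Rect {
  int x, y, width, height;
};

// Shrink |rect| to at most |target_w| x |target_h|, keeping it centered,
// mirroring what gfx::Rect::ClampToCenteredSize() does in the diff.
Rect ClampToCenteredSize(const Rect& rect, int target_w, int target_h) {
  const int w = std::min(rect.width, target_w);
  const int h = std::min(rect.height, target_h);
  return {rect.x + (rect.width - w) / 2, rect.y + (rect.height - h) / 2, w, h};
}

int main() {
  // Full visible rect of a 640x480 frame, cropped from 4:3 to 1:1 (480x480).
  Rect visible = {0, 0, 640, 480};
  const int input_w = 640, input_h = 480;
  const int cropped_input_w = 480, cropped_input_h = 480;

  // Same arithmetic as the diff: scale the visible rect by the crop ratios.
  Rect cropped = ClampToCenteredSize(
      visible,
      visible.width * cropped_input_w / input_w,
      visible.height * cropped_input_h / input_h);

  std::printf("cropped visible rect: %d,%d %dx%d\n",
              cropped.x, cropped.y, cropped.width, cropped.height);
  // Prints: cropped visible rect: 80,0 480x480
  return 0;
}
```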
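The other substantive change is that the adapter no longer keeps its own `frame_factory_` member next to the pointer the base `cricket::VideoCapturer` already owns: the constructor now hands ownership straight to `set_frame_factory()`, and `OnFrameCaptured` recovers the concrete `MediaVideoFrameFactory` through the base-class `frame_factory()` accessor plus a cast. A minimal sketch of that ownership shape, using illustrative stand-in classes rather than the real libjingle/Chromium ones, might look like the following (the diff uses `reinterpret_cast`; a `static_cast` is the more conventional downcast and is used here).

```cpp
// Standalone sketch (not the real Chromium/libjingle classes): the base class
// owns the factory; the derived adapter installs it once and downcasts when it
// needs the MediaVideoFrameFactory-specific methods. All names illustrative.
#include <memory>

class VideoFrameFactory {
 public:
  virtual ~VideoFrameFactory() = default;
};

class MediaVideoFrameFactory : public VideoFrameFactory {
 public:
  void SetFrame(int frame_id) { frame_id_ = frame_id; }
  void ReleaseFrame() { frame_id_ = -1; }

 private:
  int frame_id_ = -1;
};

class VideoCapturer {  // stands in for cricket::VideoCapturer
 public:
  virtual ~VideoCapturer() = default;

 protected:
  // The base class takes ownership of the factory.
  void set_frame_factory(VideoFrameFactory* factory) { factory_.reset(factory); }
  VideoFrameFactory* frame_factory() { return factory_.get(); }

 private:
  std::unique_ptr<VideoFrameFactory> factory_;
};

class CapturerAdapter : public VideoCapturer {
 public:
  CapturerAdapter() { set_frame_factory(new MediaVideoFrameFactory); }

  void OnFrameCaptured(int frame_id) {
    // Safe only because the constructor installed a MediaVideoFrameFactory.
    auto* media_factory =
        static_cast<MediaVideoFrameFactory*>(frame_factory());
    media_factory->SetFrame(frame_id);
    // ... signal the captured frame to the sink here ...
    media_factory->ReleaseFrame();  // Release the frame ASAP.
  }
};

int main() {
  CapturerAdapter adapter;
  adapter.OnFrameCaptured(42);
  return 0;
}
```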