OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | 5 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/memory/aligned_memory.h" | 8 #include "base/memory/aligned_memory.h" |
9 #include "base/trace_event/trace_event.h" | 9 #include "base/trace_event/trace_event.h" |
10 #include "media/base/video_frame.h" | 10 #include "media/base/video_frame.h" |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
58 | 58 |
59 uint8_t* data(webrtc::PlaneType type) override { | 59 uint8_t* data(webrtc::PlaneType type) override { |
60 NOTREACHED(); | 60 NOTREACHED(); |
61 return nullptr; | 61 return nullptr; |
62 } | 62 } |
63 | 63 |
64 int stride(webrtc::PlaneType type) const override { | 64 int stride(webrtc::PlaneType type) const override { |
65 return frame_->stride(WebRtcToMediaPlaneType(type)); | 65 return frame_->stride(WebRtcToMediaPlaneType(type)); |
66 } | 66 } |
67 | 67 |
68 void* native_handle() const override { return nullptr; } | 68 void* native_handle() const override { |
| 69 return media::VideoFrame::IsMappable(frame_->storage_type()) ? nullptr |
| 70 : frame_.get(); |
| 71 } |
69 | 72 |
70 ~VideoFrameWrapper() override {} | 73 ~VideoFrameWrapper() override {} |
71 friend class rtc::RefCountedObject<VideoFrameWrapper>; | 74 friend class rtc::RefCountedObject<VideoFrameWrapper>; |
72 | 75 |
73 scoped_refptr<media::VideoFrame> frame_; | 76 scoped_refptr<media::VideoFrame> frame_; |
74 }; | 77 }; |
75 | 78 |
76 } // anonymous namespace | 79 } // anonymous namespace |
77 | 80 |
78 // A cricket::VideoFrameFactory for media::VideoFrame. The purpose of this | 81 // A cricket::VideoFrameFactory for media::VideoFrame. The purpose of this |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
112 cricket::VideoFrame* CreateAliasedFrame( | 115 cricket::VideoFrame* CreateAliasedFrame( |
113 const cricket::CapturedFrame* input_frame, | 116 const cricket::CapturedFrame* input_frame, |
114 int cropped_input_width, | 117 int cropped_input_width, |
115 int cropped_input_height, | 118 int cropped_input_height, |
116 int output_width, | 119 int output_width, |
117 int output_height) const override { | 120 int output_height) const override { |
118 // Check that captured_frame is actually our frame. | 121 // Check that captured_frame is actually our frame. |
119 DCHECK(input_frame == &captured_frame_); | 122 DCHECK(input_frame == &captured_frame_); |
120 DCHECK(frame_.get()); | 123 DCHECK(frame_.get()); |
121 | 124 |
| 125 const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() * |
| 126 base::Time::kNanosecondsPerMicrosecond; |
| 127 |
| 128 // Return |frame_| directly if it is texture backed, because there is no |
| 129 // cropping support for textures yet. See http://crbug.com/362521. |
| 130 if (frame_->storage_type() == media::VideoFrame::STORAGE_TEXTURE) { |
| 131 return new cricket::WebRtcVideoFrame( |
| 132 new rtc::RefCountedObject<VideoFrameWrapper>(frame_), |
| 133 captured_frame_.elapsed_time, timestamp_ns); |
| 134 } |
| 135 |
122 // Create a centered cropped visible rect that preserves aspect ratio for | 136 // Create a centered cropped visible rect that preserves aspect ratio for |
123 // cropped natural size. | 137 // cropped natural size. |
124 gfx::Rect visible_rect = frame_->visible_rect(); | 138 gfx::Rect visible_rect = frame_->visible_rect(); |
125 visible_rect.ClampToCenteredSize(gfx::Size( | 139 visible_rect.ClampToCenteredSize(gfx::Size( |
126 visible_rect.width() * cropped_input_width / input_frame->width, | 140 visible_rect.width() * cropped_input_width / input_frame->width, |
127 visible_rect.height() * cropped_input_height / input_frame->height)); | 141 visible_rect.height() * cropped_input_height / input_frame->height)); |
128 | 142 |
129 const gfx::Size output_size(output_width, output_height); | 143 const gfx::Size output_size(output_width, output_height); |
130 scoped_refptr<media::VideoFrame> video_frame = | 144 scoped_refptr<media::VideoFrame> video_frame = |
131 media::VideoFrame::WrapVideoFrame(frame_, visible_rect, output_size); | 145 media::VideoFrame::WrapVideoFrame(frame_, visible_rect, output_size); |
132 video_frame->AddDestructionObserver( | 146 video_frame->AddDestructionObserver( |
133 base::Bind(&ReleaseOriginalFrame, frame_)); | 147 base::Bind(&ReleaseOriginalFrame, frame_)); |
134 | 148 |
135 const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() * | |
136 base::Time::kNanosecondsPerMicrosecond; | |
137 | |
138 // If no scaling is needed, return a wrapped version of |frame_| directly. | 149 // If no scaling is needed, return a wrapped version of |frame_| directly. |
139 if (video_frame->natural_size() == video_frame->visible_rect().size()) { | 150 if (video_frame->natural_size() == video_frame->visible_rect().size()) { |
140 return new cricket::WebRtcVideoFrame( | 151 return new cricket::WebRtcVideoFrame( |
141 new rtc::RefCountedObject<VideoFrameWrapper>(video_frame), | 152 new rtc::RefCountedObject<VideoFrameWrapper>(video_frame), |
142 captured_frame_.elapsed_time, timestamp_ns); | 153 captured_frame_.elapsed_time, timestamp_ns); |
143 } | 154 } |
144 | 155 |
145 // We need to scale the frame before we hand it over to cricket. | 156 // We need to scale the frame before we hand it over to cricket. |
146 scoped_refptr<media::VideoFrame> scaled_frame = | 157 scoped_refptr<media::VideoFrame> scaled_frame = |
147 scaled_frame_pool_.CreateFrame(media::VideoFrame::I420, output_size, | 158 scaled_frame_pool_.CreateFrame(media::VideoFrame::I420, output_size, |
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
250 best_format->fourcc = cricket::FOURCC_I420; | 261 best_format->fourcc = cricket::FOURCC_I420; |
251 best_format->interval = desired.interval; | 262 best_format->interval = desired.interval; |
252 return true; | 263 return true; |
253 } | 264 } |
254 | 265 |
255 void WebRtcVideoCapturerAdapter::OnFrameCaptured( | 266 void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
256 const scoped_refptr<media::VideoFrame>& frame) { | 267 const scoped_refptr<media::VideoFrame>& frame) { |
257 DCHECK(thread_checker_.CalledOnValidThread()); | 268 DCHECK(thread_checker_.CalledOnValidThread()); |
258 TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured"); | 269 TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured"); |
259 if (!(media::VideoFrame::I420 == frame->format() || | 270 if (!(media::VideoFrame::I420 == frame->format() || |
260 media::VideoFrame::YV12 == frame->format())) { | 271 media::VideoFrame::YV12 == frame->format() || |
261 // Some types of sources support textures as output. Since connecting | 272 media::VideoFrame::STORAGE_TEXTURE == frame->storage_type())) { |
262 // sources and sinks do not check the format, we need to just ignore | 273 // Since connecting sources and sinks do not check the format, we need to |
263 // formats that we can not handle. | 274 // just ignore formats that we can not handle. |
264 NOTREACHED(); | 275 NOTREACHED(); |
265 return; | 276 return; |
266 } | 277 } |
267 | 278 |
268 if (first_frame_timestamp_ == media::kNoTimestamp()) | 279 if (first_frame_timestamp_ == media::kNoTimestamp()) |
269 first_frame_timestamp_ = frame->timestamp(); | 280 first_frame_timestamp_ = frame->timestamp(); |
270 | 281 |
271 const int64 elapsed_time = | 282 const int64 elapsed_time = |
272 (frame->timestamp() - first_frame_timestamp_).InMicroseconds() * | 283 (frame->timestamp() - first_frame_timestamp_).InMicroseconds() * |
273 base::Time::kNanosecondsPerMicrosecond; | 284 base::Time::kNanosecondsPerMicrosecond; |
274 | 285 |
275 // Inject the frame via the VideoFrameFactory. | 286 // Inject the frame via the VideoFrameFactory. |
276 DCHECK(frame_factory_ == frame_factory()); | 287 DCHECK(frame_factory_ == frame_factory()); |
277 frame_factory_->SetFrame(frame, elapsed_time); | 288 frame_factory_->SetFrame(frame, elapsed_time); |
278 | 289 |
279 // This signals to libJingle that a new VideoFrame is available. | 290 // This signals to libJingle that a new VideoFrame is available. |
280 SignalFrameCaptured(this, frame_factory_->GetCapturedFrame()); | 291 SignalFrameCaptured(this, frame_factory_->GetCapturedFrame()); |
281 | 292 |
282 frame_factory_->ReleaseFrame(); // Release the frame ASAP. | 293 frame_factory_->ReleaseFrame(); // Release the frame ASAP. |
283 } | 294 } |
284 | 295 |
285 } // namespace content | 296 } // namespace content |
OLD | NEW |