Chromium Code Reviews

Unified Diff: content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc

Issue 1128213005: Passing Native Texture backed Video Frame from Renderer to GPU process (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 5 years, 6 months ago
 // Copyright 2014 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"

 #include "base/bind.h"
 #include "base/memory/aligned_memory.h"
 #include "base/trace_event/trace_event.h"
 #include "media/base/video_frame.h"
(...skipping 47 matching lines...)

   uint8_t* data(webrtc::PlaneType type) override {
     NOTREACHED();
     return nullptr;
   }

   int stride(webrtc::PlaneType type) const override {
     return frame_->stride(WebRtcToMediaPlaneType(type));
   }

-  void* native_handle() const override { return nullptr; }
+  void* native_handle() const override {
+    return frame_.get();
      magjed_chromium 2015/06/03 08:40:00: In webrtc, we have some branching on whether native_handle() …
      emircan 2015/06/03 20:56:12: Done.
+  }

   ~VideoFrameWrapper() override {}
   friend class rtc::RefCountedObject<VideoFrameWrapper>;

   scoped_refptr<media::VideoFrame> frame_;
 };

 } // anonymous namespace
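For context on magjed_chromium's comment above: once native_handle() returns the wrapped media::VideoFrame, webrtc-side consumers can branch on it to tell texture-backed buffers apart from software I420 buffers. The sketch below is an editor's illustration rather than code from this CL; HandleBuffer is a made-up name, the include paths reflect the 2015-era tree layout, and only the native_handle()/data()/stride() members of the webrtc::VideoFrameBuffer interface overridden above are assumed.

// Editor's sketch, not part of this CL. Assumes the 2015-era
// webrtc::VideoFrameBuffer interface that VideoFrameWrapper overrides;
// include paths may differ between trees.
#include "media/base/video_frame.h"
#include "webrtc/common_video/interface/video_frame_buffer.h"

// Hypothetical consumer of the buffer produced by VideoFrameWrapper.
void HandleBuffer(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
  if (buffer->native_handle()) {
    // Texture-backed path: VideoFrameWrapper stores the original
    // media::VideoFrame behind native_handle(), so cast it back and hand it
    // to a GPU-capable sink instead of touching data().
    media::VideoFrame* frame =
        static_cast<media::VideoFrame*>(buffer->native_handle());
    // ... route |frame| (and its mailbox holder) to the hardware path ...
  } else {
    // Software path: I420 plane data is reachable through data()/stride().
    const uint8_t* y_plane = buffer->data(webrtc::kYPlane);
    const int y_stride = buffer->stride(webrtc::kYPlane);
    // ... read or copy pixels, e.g. with libyuv ...
  }
}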

 // A cricket::VideoFrameFactory for media::VideoFrame. The purpose of this
(...skipping 33 matching lines...)
   cricket::VideoFrame* CreateAliasedFrame(
       const cricket::CapturedFrame* input_frame,
       int cropped_input_width,
       int cropped_input_height,
       int output_width,
       int output_height) const override {
     // Check that captured_frame is actually our frame.
     DCHECK(input_frame == &captured_frame_);
     DCHECK(frame_.get());

+    const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() *
+                                 base::Time::kNanosecondsPerMicrosecond;
+
+    // Return |frame_| directly if it is texture backed, because there is no
+    // cropping support for textures yet. See http://crbug/362521.
+    if (frame_->storage_type() == media::VideoFrame::STORAGE_TEXTURE) {
+      return new cricket::WebRtcVideoFrame(
+          new rtc::RefCountedObject<VideoFrameWrapper>(frame_),
+          captured_frame_.elapsed_time, timestamp_ns);
+    }
+
     // Create a centered cropped visible rect that preserves aspect ratio for
     // cropped natural size.
     gfx::Rect visible_rect = frame_->visible_rect();
     visible_rect.ClampToCenteredSize(gfx::Size(
         visible_rect.width() * cropped_input_width / input_frame->width,
         visible_rect.height() * cropped_input_height / input_frame->height));

     const gfx::Size output_size(output_width, output_height);
     scoped_refptr<media::VideoFrame> video_frame =
         media::VideoFrame::WrapVideoFrame(frame_, visible_rect, output_size);
     video_frame->AddDestructionObserver(
         base::Bind(&ReleaseOriginalFrame, frame_));

-    const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() *
-                                 base::Time::kNanosecondsPerMicrosecond;
-
     // If no scaling is needed, return a wrapped version of |frame_| directly.
     if (video_frame->natural_size() == video_frame->visible_rect().size()) {
       return new cricket::WebRtcVideoFrame(
           new rtc::RefCountedObject<VideoFrameWrapper>(video_frame),
           captured_frame_.elapsed_time, timestamp_ns);
     }

     // We need to scale the frame before we hand it over to cricket.
     scoped_refptr<media::VideoFrame> scaled_frame =
         scaled_frame_pool_.CreateFrame(media::VideoFrame::I420, output_size,
(...skipping 102 matching lines...)
   best_format->fourcc = cricket::FOURCC_I420;
   best_format->interval = desired.interval;
   return true;
 }

 void WebRtcVideoCapturerAdapter::OnFrameCaptured(
     const scoped_refptr<media::VideoFrame>& frame) {
   DCHECK(thread_checker_.CalledOnValidThread());
   TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured");
   if (!(media::VideoFrame::I420 == frame->format() ||
-        media::VideoFrame::YV12 == frame->format())) {
-    // Some types of sources support textures as output. Since connecting
-    // sources and sinks do not check the format, we need to just ignore
-    // formats that we can not handle.
+        media::VideoFrame::YV12 == frame->format() ||
+        media::VideoFrame::STORAGE_TEXTURE == frame->storage_type())) {
+    // Since connecting sources and sinks do not check the format, we need to
+    // just ignore formats that we can not handle.
     NOTREACHED();
     return;
   }

   if (first_frame_timestamp_ == media::kNoTimestamp())
     first_frame_timestamp_ = frame->timestamp();

   const int64 elapsed_time =
       (frame->timestamp() - first_frame_timestamp_).InMicroseconds() *
       base::Time::kNanosecondsPerMicrosecond;

   // Inject the frame via the VideoFrameFactory.
   DCHECK(frame_factory_ == frame_factory());
   frame_factory_->SetFrame(frame, elapsed_time);

   // This signals to libJingle that a new VideoFrame is available.
   SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());

   frame_factory_->ReleaseFrame();  // Release the frame ASAP.
 }

 } // namespace content
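As an aside on the centered-crop arithmetic in CreateAliasedFrame() above: it scales the sink's requested crop into the frame's visible rect and then centers it, preserving the aspect ratio of the requested crop. The worked example below is an editor's sketch with arbitrary numbers, not code from this CL, though gfx::Rect::ClampToCenteredSize() is the same call the CL's context lines use.

// Editor's sketch: the crop math from CreateAliasedFrame() applied to
// arbitrary example numbers.
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"

void CenteredCropExample() {
  // Captured frame: 1280x720, fully visible.
  gfx::Rect visible_rect(0, 0, 1280, 720);
  const int input_width = 1280;
  const int input_height = 720;
  // The sink requests a 4:3 crop of the input, e.g. 960x720.
  const int cropped_input_width = 960;
  const int cropped_input_height = 720;

  // Same arithmetic as in the diff: scale the requested crop into
  // visible_rect coordinates, then center it.
  visible_rect.ClampToCenteredSize(gfx::Size(
      visible_rect.width() * cropped_input_width / input_width,       // 960
      visible_rect.height() * cropped_input_height / input_height));  // 720

  // visible_rect is now (160, 0) 960x720: a centered 4:3 window into the
  // 16:9 source, which WrapVideoFrame() then uses as the crop rectangle.
}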