OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/renderer_host/media/video_capture_device_client.h" | 5 #include "content/browser/renderer_host/media/video_capture_device_client.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/strings/stringprintf.h" | 8 #include "base/strings/stringprintf.h" |
9 #include "base/trace_event/trace_event.h" | 9 #include "base/trace_event/trace_event.h" |
10 #include "content/browser/renderer_host/media/video_capture_buffer_pool.h" | 10 #include "content/browser/renderer_host/media/video_capture_buffer_pool.h" |
(...skipping 55 matching lines...)
66 | 66 |
67 if (last_captured_pixel_format_ != frame_format.pixel_format) { | 67 if (last_captured_pixel_format_ != frame_format.pixel_format) { |
68 OnLog("Pixel format: " + media::VideoCaptureFormat::PixelFormatToString( | 68 OnLog("Pixel format: " + media::VideoCaptureFormat::PixelFormatToString( |
69 frame_format.pixel_format)); | 69 frame_format.pixel_format)); |
70 last_captured_pixel_format_ = frame_format.pixel_format; | 70 last_captured_pixel_format_ = frame_format.pixel_format; |
71 } | 71 } |
72 | 72 |
73 if (!frame_format.IsValid()) | 73 if (!frame_format.IsValid()) |
74 return; | 74 return; |
75 | 75 |
76 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest | 76 // Chopped pixels in width/height in case video capture device has odd |
77 // bit decomposition of {width, height}, grabbing the odd and even parts. | 77 // numbers for width/height. |
78 const int chopped_width = frame_format.frame_size.width() & 1; | 78 int chopped_width = 0; |
79 const int chopped_height = frame_format.frame_size.height() & 1; | 79 int chopped_height = 0; |
80 const int new_unrotated_width = frame_format.frame_size.width() & ~1; | 80 int new_unrotated_width = frame_format.frame_size.width(); |
81 const int new_unrotated_height = frame_format.frame_size.height() & ~1; | 81 int new_unrotated_height = frame_format.frame_size.height(); |
| 82 |
| 83 if (new_unrotated_width & 1) { |
| 84 --new_unrotated_width; |
| 85 chopped_width = 1; |
| 86 } |
| 87 if (new_unrotated_height & 1) { |
| 88 --new_unrotated_height; |
| 89 chopped_height = 1; |
| 90 } |
82 | 91 |
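[Note, not part of the CL: the branch-based form added on the right computes the same values as the bit-mask decomposition it replaces, namely the even part of each dimension plus a one-pixel remainder when the dimension is odd. A minimal standalone sketch showing the two forms agree; the helper name SplitOddDimension is made up for illustration.]

    #include <cassert>
    #include <utility>

    // Returns {even part, chopped pixel}, mirroring the branch-based logic above.
    std::pair<int, int> SplitOddDimension(int extent) {
      int chopped = 0;
      if (extent & 1) {
        --extent;
        chopped = 1;
      }
      return std::make_pair(extent, chopped);
    }

    int main() {
      const int samples[] = {639, 640, 1079, 1080};
      for (int extent : samples) {
        const std::pair<int, int> split = SplitOddDimension(extent);
        // The removed bit-mask form: even part is extent & ~1, remainder is extent & 1.
        assert(split.first == (extent & ~1));
        assert(split.second == (extent & 1));
      }
      return 0;
    }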
83 int destination_width = new_unrotated_width; | 92 int destination_width = new_unrotated_width; |
84 int destination_height = new_unrotated_height; | 93 int destination_height = new_unrotated_height; |
85 if (rotation == 90 || rotation == 270) { | 94 if (rotation == 90 || rotation == 270) { |
86 destination_width = new_unrotated_height; | 95 destination_width = new_unrotated_height; |
87 destination_height = new_unrotated_width; | 96 destination_height = new_unrotated_width; |
88 } | 97 } |
89 | |
90 DCHECK_EQ(rotation % 90, 0) | |
91 << " Rotation must be a multiple of 90, now: " << rotation; | |
92 libyuv::RotationMode rotation_mode = libyuv::kRotate0; | |
93 if (rotation == 90) | |
94 rotation_mode = libyuv::kRotate90; | |
95 else if (rotation == 180) | |
96 rotation_mode = libyuv::kRotate180; | |
97 else if (rotation == 270) | |
98 rotation_mode = libyuv::kRotate270; | |
99 | |
100 const gfx::Size dimensions(destination_width, destination_height); | 98 const gfx::Size dimensions(destination_width, destination_height); |
101 if (!VideoFrame::IsValidConfig(VideoFrame::I420, | 99 if (!VideoFrame::IsValidConfig(VideoFrame::I420, |
102 dimensions, | 100 dimensions, |
103 gfx::Rect(dimensions), | 101 gfx::Rect(dimensions), |
104 dimensions)) { | 102 dimensions)) { |
105 return; | 103 return; |
106 } | 104 } |
107 | 105 |
108 scoped_refptr<Buffer> buffer = ReserveOutputBuffer(VideoFrame::I420, | 106 scoped_refptr<Buffer> buffer = ReserveOutputBuffer(VideoFrame::I420, |
109 dimensions); | 107 dimensions); |
110 if (!buffer.get()) | 108 if (!buffer.get()) |
111 return; | 109 return; |
112 | 110 |
113 uint8* const yplane = reinterpret_cast<uint8*>(buffer->data()); | 111 uint8* const yplane = reinterpret_cast<uint8*>(buffer->data()); |
114 uint8* const uplane = | 112 uint8* const uplane = |
115 yplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420, | 113 yplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420, |
116 VideoFrame::kYPlane, dimensions); | 114 VideoFrame::kYPlane, dimensions); |
117 uint8* const vplane = | 115 uint8* const vplane = |
118 uplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420, | 116 uplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420, |
119 VideoFrame::kUPlane, dimensions); | 117 VideoFrame::kUPlane, dimensions); |
120 int yplane_stride = dimensions.width(); | 118 int yplane_stride = dimensions.width(); |
121 int uv_plane_stride = yplane_stride / 2; | 119 int uv_plane_stride = yplane_stride / 2; |
122 int crop_x = 0; | 120 int crop_x = 0; |
123 int crop_y = 0; | 121 int crop_y = 0; |
124 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; | 122 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
125 | 123 |
| 124 libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
| 125 if (rotation == 90) |
| 126 rotation_mode = libyuv::kRotate90; |
| 127 else if (rotation == 180) |
| 128 rotation_mode = libyuv::kRotate180; |
| 129 else if (rotation == 270) |
| 130 rotation_mode = libyuv::kRotate270; |
| 131 |
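[Note, not part of the CL: this hunk relocates the degree-to-enum mapping rather than changing it. For reference, the same mapping written as a standalone helper; this is a sketch only, ToRotationMode is a hypothetical name, and it assumes the caller passes one of 0, 90, 180, or 270, which the old code enforced with a DCHECK.]

    #include <cassert>
    #include "libyuv/rotate.h"

    // Maps a clockwise rotation in degrees onto libyuv's rotation enum.
    libyuv::RotationMode ToRotationMode(int rotation) {
      assert(rotation == 0 || rotation == 90 || rotation == 180 || rotation == 270);
      switch (rotation) {
        case 90:
          return libyuv::kRotate90;
        case 180:
          return libyuv::kRotate180;
        case 270:
          return libyuv::kRotate270;
        default:
          return libyuv::kRotate0;
      }
    }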
126 bool flip = false; | 132 bool flip = false; |
127 switch (frame_format.pixel_format) { | 133 switch (frame_format.pixel_format) { |
128 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. | 134 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. |
129 break; | 135 break; |
130 case media::PIXEL_FORMAT_I420: | 136 case media::PIXEL_FORMAT_I420: |
131 DCHECK(!chopped_width && !chopped_height); | 137 DCHECK(!chopped_width && !chopped_height); |
132 origin_colorspace = libyuv::FOURCC_I420; | 138 origin_colorspace = libyuv::FOURCC_I420; |
133 break; | 139 break; |
134 case media::PIXEL_FORMAT_YV12: | 140 case media::PIXEL_FORMAT_YV12: |
135 DCHECK(!chopped_width && !chopped_height); | 141 DCHECK(!chopped_width && !chopped_height); |
(...skipping 80 matching lines...)
216 BrowserThread::IO, | 222 BrowserThread::IO, |
217 FROM_HERE, | 223 FROM_HERE, |
218 base::Bind( | 224 base::Bind( |
219 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, | 225 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, |
220 controller_, | 226 controller_, |
221 buffer, | 227 buffer, |
222 frame, | 228 frame, |
223 timestamp)); | 229 timestamp)); |
224 } | 230 } |
225 | 231 |
226 void | |
227 VideoCaptureDeviceClient::OnIncomingCapturedYuvData( | |
228 const uint8* y_data, | |
229 const uint8* u_data, | |
230 const uint8* v_data, | |
231 size_t y_stride, | |
232 size_t u_stride, | |
233 size_t v_stride, | |
234 const VideoCaptureFormat& frame_format, | |
235 int clockwise_rotation, | |
236 const base::TimeTicks& timestamp) { | |
237 TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedYuvData"); | |
238 DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_I420); | |
239 DCHECK_EQ(clockwise_rotation, 0) << "Rotation not supported"; | |
240 | |
241 scoped_refptr<Buffer> buffer = ReserveOutputBuffer(VideoFrame::I420, | |
242 frame_format.frame_size); | |
243 if (!buffer.get()) | |
244 return; | |
245 | |
246 // Blit (copy) here from y,u,v into buffer.data(). Needed so we can return | |
247 // the parameter buffer synchronously to the driver. | |
248 const size_t y_plane_size = VideoFrame::PlaneAllocationSize(VideoFrame::I420, | |
249 VideoFrame::kYPlane, frame_format.frame_size); | |
250 const size_t u_plane_size = VideoFrame::PlaneAllocationSize( | |
251 VideoFrame::I420, VideoFrame::kUPlane, frame_format.frame_size); | |
252 uint8* const dst_y = reinterpret_cast<uint8*>(buffer->data()); | |
253 uint8* const dst_u = dst_y + y_plane_size; | |
254 uint8* const dst_v = dst_u + u_plane_size; | |
255 | |
256 const size_t dst_y_stride = VideoFrame::RowBytes( | |
257 VideoFrame::kYPlane, VideoFrame::I420, frame_format.frame_size.width()); | |
258 const size_t dst_u_stride = VideoFrame::RowBytes( | |
259 VideoFrame::kUPlane, VideoFrame::I420, frame_format.frame_size.width()); | |
260 const size_t dst_v_stride = VideoFrame::RowBytes( | |
261 VideoFrame::kVPlane, VideoFrame::I420, frame_format.frame_size.width()); | |
262 DCHECK_GE(y_stride, dst_y_stride); | |
263 DCHECK_GE(u_stride, dst_u_stride); | |
264 DCHECK_GE(v_stride, dst_v_stride); | |
265 | |
266 if (libyuv::I420Copy(y_data, y_stride, | |
267 u_data, u_stride, | |
268 v_data, v_stride, | |
269 dst_y, dst_y_stride, | |
270 dst_u, dst_u_stride, | |
271 dst_v, dst_v_stride, | |
272 frame_format.frame_size.width(), | |
273 frame_format.frame_size.height())) { | |
274 DLOG(WARNING) << "Failed to copy buffer"; | |
275 return; | |
276 } | |
277 | |
278 scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapExternalYuvData( | |
279 VideoFrame::I420, frame_format.frame_size, | |
280 gfx::Rect(frame_format.frame_size), frame_format.frame_size, y_stride, | |
281 u_stride, v_stride, dst_y, dst_u, dst_v, base::TimeDelta(), | |
282 base::Closure()); | |
283 DCHECK(video_frame.get()); | |
284 | |
285 BrowserThread::PostTask( | |
286 BrowserThread::IO, | |
287 FROM_HERE, | |
288 base::Bind( | |
289 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, | |
290 controller_, | |
291 buffer, | |
292 video_frame, | |
293 timestamp)); | |
294 }; | |
295 | |
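[Note, not part of the CL: both the removed function and the retained conversion path lay the three I420 planes out back to back, with U starting y_plane_size bytes after Y and V starting u_plane_size bytes after U. A standalone sketch of that layout for an even-sized frame; sizes are computed by hand here rather than through VideoFrame::PlaneAllocationSize, which may add padding or alignment.]

    #include <cstddef>
    #include <cstdio>

    int main() {
      const int width = 640;
      const int height = 480;  // Both even, as the chopping above guarantees.
      const size_t y_stride = static_cast<size_t>(width);
      const size_t uv_stride = y_stride / 2;            // Chroma subsampled 2x horizontally.
      const size_t y_size = y_stride * height;
      const size_t uv_size = uv_stride * (height / 2);  // And 2x vertically.
      std::printf("Y at offset 0, U at %zu, V at %zu, total %zu bytes\n",
                  y_size, y_size + uv_size, y_size + 2 * uv_size);
      return 0;
    }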
296 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> | 232 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> |
297 VideoCaptureDeviceClient::ReserveOutputBuffer(VideoFrame::Format format, | 233 VideoCaptureDeviceClient::ReserveOutputBuffer(VideoFrame::Format format, |
298 const gfx::Size& dimensions) { | 234 const gfx::Size& dimensions) { |
299 const size_t frame_bytes = VideoFrame::AllocationSize(format, dimensions); | 235 const size_t frame_bytes = VideoFrame::AllocationSize(format, dimensions); |
300 if (format == VideoFrame::NATIVE_TEXTURE) { | 236 if (format == VideoFrame::NATIVE_TEXTURE) { |
301 DCHECK_EQ(dimensions.width(), 0); | 237 DCHECK_EQ(dimensions.width(), 0); |
302 DCHECK_EQ(dimensions.height(), 0); | 238 DCHECK_EQ(dimensions.height(), 0); |
303 } else { | 239 } else { |
304 DLOG_IF(ERROR, frame_bytes == 0) << "Error calculating allocation size"; | 240 DLOG_IF(ERROR, frame_bytes == 0) << "Error calculating allocation size"; |
305 } | 241 } |
(...skipping 51 matching lines...)
357 } | 293 } |
358 | 294 |
359 void VideoCaptureDeviceClient::OnLog( | 295 void VideoCaptureDeviceClient::OnLog( |
360 const std::string& message) { | 296 const std::string& message) { |
361 BrowserThread::PostTask(BrowserThread::IO, FROM_HERE, | 297 BrowserThread::PostTask(BrowserThread::IO, FROM_HERE, |
362 base::Bind(&VideoCaptureController::DoLogOnIOThread, | 298 base::Bind(&VideoCaptureController::DoLogOnIOThread, |
363 controller_, message)); | 299 controller_, message)); |
364 } | 300 } |
365 | 301 |
366 } // namespace content | 302 } // namespace content |