Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(275)

Side by Side Diff: media/capture/video/video_capture_device_client.cc

Issue 2595853003: Revert of [Mojo Video Capture] Decouple VCController from VCBufferPool and VCDeviceClient (Closed)
Patch Set: Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « media/capture/video/video_capture_device.h ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/capture/video/video_capture_device_client.h" 5 #include "media/capture/video/video_capture_device_client.h"
6 6
7 #include <algorithm> 7 #include <algorithm>
8 #include <utility> 8 #include <utility>
9 9
10 #include "base/bind.h" 10 #include "base/bind.h"
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after
88 88
89 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() { 89 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {
90 // This should be on the platform auxiliary thread since 90 // This should be on the platform auxiliary thread since
91 // |external_jpeg_decoder_| needs to be destroyed on the same thread as 91 // |external_jpeg_decoder_| needs to be destroyed on the same thread as
92 // OnIncomingCapturedData. 92 // OnIncomingCapturedData.
93 } 93 }
94 94
95 void VideoCaptureDeviceClient::OnIncomingCapturedData( 95 void VideoCaptureDeviceClient::OnIncomingCapturedData(
96 const uint8_t* data, 96 const uint8_t* data,
97 int length, 97 int length,
98 const VideoCaptureFormat& format, 98 const VideoCaptureFormat& frame_format,
99 int rotation, 99 int rotation,
100 base::TimeTicks reference_time, 100 base::TimeTicks reference_time,
101 base::TimeDelta timestamp, 101 base::TimeDelta timestamp,
102 int frame_feedback_id) { 102 int frame_feedback_id) {
103 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); 103 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData");
104 DCHECK_EQ(media::PIXEL_STORAGE_CPU, format.pixel_storage); 104 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage);
105 105
106 if (last_captured_pixel_format_ != format.pixel_format) { 106 if (last_captured_pixel_format_ != frame_format.pixel_format) {
107 OnLog("Pixel format: " + 107 OnLog("Pixel format: " +
108 media::VideoPixelFormatToString(format.pixel_format)); 108 media::VideoPixelFormatToString(frame_format.pixel_format));
109 last_captured_pixel_format_ = format.pixel_format; 109 last_captured_pixel_format_ = frame_format.pixel_format;
110 110
111 if (format.pixel_format == media::PIXEL_FORMAT_MJPEG && 111 if (frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG &&
112 !external_jpeg_decoder_initialized_) { 112 !external_jpeg_decoder_initialized_) {
113 external_jpeg_decoder_initialized_ = true; 113 external_jpeg_decoder_initialized_ = true;
114 external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run(); 114 external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run();
115 external_jpeg_decoder_->Initialize(); 115 external_jpeg_decoder_->Initialize();
116 } 116 }
117 } 117 }
118 118
119 if (!format.IsValid()) 119 if (!frame_format.IsValid())
120 return; 120 return;
121 121
122 if (format.pixel_format == media::PIXEL_FORMAT_Y16) { 122 if (frame_format.pixel_format == media::PIXEL_FORMAT_Y16) {
123 return OnIncomingCapturedY16Data(data, length, format, reference_time, 123 return OnIncomingCapturedY16Data(data, length, frame_format, reference_time,
124 timestamp, frame_feedback_id); 124 timestamp, frame_feedback_id);
125 } 125 }
126 126
127 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest 127 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest
128 // bit decomposition of {width, height}, grabbing the odd and even parts. 128 // bit decomposition of {width, height}, grabbing the odd and even parts.
129 const int chopped_width = format.frame_size.width() & 1; 129 const int chopped_width = frame_format.frame_size.width() & 1;
130 const int chopped_height = format.frame_size.height() & 1; 130 const int chopped_height = frame_format.frame_size.height() & 1;
131 const int new_unrotated_width = format.frame_size.width() & ~1; 131 const int new_unrotated_width = frame_format.frame_size.width() & ~1;
132 const int new_unrotated_height = format.frame_size.height() & ~1; 132 const int new_unrotated_height = frame_format.frame_size.height() & ~1;
133 133
134 int destination_width = new_unrotated_width; 134 int destination_width = new_unrotated_width;
135 int destination_height = new_unrotated_height; 135 int destination_height = new_unrotated_height;
136 if (rotation == 90 || rotation == 270) 136 if (rotation == 90 || rotation == 270)
137 std::swap(destination_width, destination_height); 137 std::swap(destination_width, destination_height);
138 138
139 DCHECK_EQ(0, rotation % 90) << " Rotation must be a multiple of 90, now: " 139 DCHECK_EQ(0, rotation % 90) << " Rotation must be a multiple of 90, now: "
140 << rotation; 140 << rotation;
141 libyuv::RotationMode rotation_mode = libyuv::kRotate0; 141 libyuv::RotationMode rotation_mode = libyuv::kRotate0;
142 if (rotation == 90) 142 if (rotation == 90)
(...skipping 17 matching lines...) Expand all
160 if (!buffer.get()) 160 if (!buffer.get())
161 return; 161 return;
162 162
163 const int yplane_stride = dimensions.width(); 163 const int yplane_stride = dimensions.width();
164 const int uv_plane_stride = yplane_stride / 2; 164 const int uv_plane_stride = yplane_stride / 2;
165 int crop_x = 0; 165 int crop_x = 0;
166 int crop_y = 0; 166 int crop_y = 0;
167 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; 167 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY;
168 168
169 bool flip = false; 169 bool flip = false;
170 switch (format.pixel_format) { 170 switch (frame_format.pixel_format) {
171 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. 171 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set.
172 break; 172 break;
173 case media::PIXEL_FORMAT_I420: 173 case media::PIXEL_FORMAT_I420:
174 DCHECK(!chopped_width && !chopped_height); 174 DCHECK(!chopped_width && !chopped_height);
175 origin_colorspace = libyuv::FOURCC_I420; 175 origin_colorspace = libyuv::FOURCC_I420;
176 break; 176 break;
177 case media::PIXEL_FORMAT_YV12: 177 case media::PIXEL_FORMAT_YV12:
178 DCHECK(!chopped_width && !chopped_height); 178 DCHECK(!chopped_width && !chopped_height);
179 origin_colorspace = libyuv::FOURCC_YV12; 179 origin_colorspace = libyuv::FOURCC_YV12;
180 break; 180 break;
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
225 break; 225 break;
226 case media::PIXEL_FORMAT_MJPEG: 226 case media::PIXEL_FORMAT_MJPEG:
227 origin_colorspace = libyuv::FOURCC_MJPG; 227 origin_colorspace = libyuv::FOURCC_MJPG;
228 break; 228 break;
229 default: 229 default:
230 NOTREACHED(); 230 NOTREACHED();
231 } 231 }
232 232
233 // The input |length| can be greater than the required buffer size because of 233 // The input |length| can be greater than the required buffer size because of
234 // paddings and/or alignments, but it cannot be smaller. 234 // paddings and/or alignments, but it cannot be smaller.
235 DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize()); 235 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize());
236 236
237 if (external_jpeg_decoder_) { 237 if (external_jpeg_decoder_) {
238 const VideoCaptureJpegDecoder::STATUS status = 238 const VideoCaptureJpegDecoder::STATUS status =
239 external_jpeg_decoder_->GetStatus(); 239 external_jpeg_decoder_->GetStatus();
240 if (status == VideoCaptureJpegDecoder::FAILED) { 240 if (status == VideoCaptureJpegDecoder::FAILED) {
241 external_jpeg_decoder_.reset(); 241 external_jpeg_decoder_.reset();
242 } else if (status == VideoCaptureJpegDecoder::INIT_PASSED && 242 } else if (status == VideoCaptureJpegDecoder::INIT_PASSED &&
243 format.pixel_format == media::PIXEL_FORMAT_MJPEG && 243 frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG &&
244 rotation == 0 && !flip) { 244 rotation == 0 && !flip) {
245 external_jpeg_decoder_->DecodeCapturedData( 245 external_jpeg_decoder_->DecodeCapturedData(data, length, frame_format,
246 data, length, format, reference_time, timestamp, std::move(buffer)); 246 reference_time, timestamp,
247 std::move(buffer));
247 return; 248 return;
248 } 249 }
249 } 250 }
250 251
251 if (libyuv::ConvertToI420( 252 if (libyuv::ConvertToI420(data, length, y_plane_data, yplane_stride,
252 data, length, y_plane_data, yplane_stride, u_plane_data, 253 u_plane_data, uv_plane_stride, v_plane_data,
253 uv_plane_stride, v_plane_data, uv_plane_stride, crop_x, crop_y, 254 uv_plane_stride, crop_x, crop_y,
254 format.frame_size.width(), 255 frame_format.frame_size.width(),
255 (flip ? -1 : 1) * format.frame_size.height(), new_unrotated_width, 256 (flip ? -1 : 1) * frame_format.frame_size.height(),
256 new_unrotated_height, rotation_mode, origin_colorspace) != 0) { 257 new_unrotated_width, new_unrotated_height,
258 rotation_mode, origin_colorspace) != 0) {
257 DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from " 259 DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from "
258 << media::VideoPixelFormatToString(format.pixel_format); 260 << media::VideoPixelFormatToString(frame_format.pixel_format);
259 return; 261 return;
260 } 262 }
261 263
262 const VideoCaptureFormat output_format = 264 const VideoCaptureFormat output_format =
263 VideoCaptureFormat(dimensions, format.frame_rate, 265 VideoCaptureFormat(dimensions, frame_format.frame_rate,
264 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); 266 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU);
265 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, 267 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time,
266 timestamp); 268 timestamp);
267 } 269 }
268 270
269 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> 271 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer>
270 VideoCaptureDeviceClient::ReserveOutputBuffer( 272 VideoCaptureDeviceClient::ReserveOutputBuffer(
271 const gfx::Size& frame_size, 273 const gfx::Size& frame_size,
272 media::VideoPixelFormat pixel_format, 274 media::VideoPixelFormat pixel_format,
273 media::VideoPixelStorage pixel_storage, 275 media::VideoPixelStorage pixel_storage,
(...skipping 11 matching lines...) Expand all
285 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) 287 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId)
286 receiver_->OnBufferDestroyed(buffer_id_to_drop); 288 receiver_->OnBufferDestroyed(buffer_id_to_drop);
287 if (buffer_id == VideoCaptureBufferPool::kInvalidId) 289 if (buffer_id == VideoCaptureBufferPool::kInvalidId)
288 return nullptr; 290 return nullptr;
289 return base::WrapUnique<Buffer>( 291 return base::WrapUnique<Buffer>(
290 new AutoReleaseBuffer(buffer_pool_, buffer_id, frame_feedback_id)); 292 new AutoReleaseBuffer(buffer_pool_, buffer_id, frame_feedback_id));
291 } 293 }
292 294
293 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( 295 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer(
294 std::unique_ptr<Buffer> buffer, 296 std::unique_ptr<Buffer> buffer,
295 const VideoCaptureFormat& format, 297 const VideoCaptureFormat& frame_format,
296 base::TimeTicks reference_time, 298 base::TimeTicks reference_time,
297 base::TimeDelta timestamp) { 299 base::TimeDelta timestamp) {
298 DCHECK(IsFormatSupported(format.pixel_format)); 300 DCHECK(IsFormatSupported(frame_format.pixel_format));
299 DCHECK_EQ(media::PIXEL_STORAGE_CPU, format.pixel_storage); 301 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage);
300 302
301 scoped_refptr<VideoFrame> frame; 303 scoped_refptr<VideoFrame> frame;
302 if (buffer->IsBackedByVideoFrame()) { 304 if (buffer->IsBackedByVideoFrame()) {
303 frame = buffer->GetVideoFrame(); 305 frame = buffer->GetVideoFrame();
304 frame->set_timestamp(timestamp); 306 frame->set_timestamp(timestamp);
305 } else { 307 } else {
306 frame = VideoFrame::WrapExternalSharedMemory( 308 frame = VideoFrame::WrapExternalSharedMemory(
307 format.pixel_format, format.frame_size, gfx::Rect(format.frame_size), 309 frame_format.pixel_format, frame_format.frame_size,
308 format.frame_size, reinterpret_cast<uint8_t*>(buffer->data()), 310 gfx::Rect(frame_format.frame_size), frame_format.frame_size,
309 VideoFrame::AllocationSize(format.pixel_format, format.frame_size), 311 reinterpret_cast<uint8_t*>(buffer->data()),
312 VideoFrame::AllocationSize(frame_format.pixel_format,
313 frame_format.frame_size),
310 base::SharedMemory::NULLHandle(), 0u, timestamp); 314 base::SharedMemory::NULLHandle(), 0u, timestamp);
311 } 315 }
312 if (!frame) 316 if (!frame)
313 return; 317 return;
314 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, 318 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
315 format.frame_rate); 319 frame_format.frame_rate);
316 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, 320 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME,
317 reference_time); 321 reference_time);
318 OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); 322 OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame));
319 } 323 }
320 324
321 void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( 325 void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame(
322 std::unique_ptr<Buffer> buffer, 326 std::unique_ptr<Buffer> buffer,
323 scoped_refptr<VideoFrame> frame) { 327 scoped_refptr<VideoFrame> frame) {
324 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); 328 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame));
325 } 329 }
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after
384 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); 388 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea();
385 *v_plane_data = 389 *v_plane_data =
386 *u_plane_data + 390 *u_plane_data +
387 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); 391 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea();
388 return buffer; 392 return buffer;
389 } 393 }
390 394
391 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( 395 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data(
392 const uint8_t* data, 396 const uint8_t* data,
393 int length, 397 int length,
394 const VideoCaptureFormat& format, 398 const VideoCaptureFormat& frame_format,
395 base::TimeTicks reference_time, 399 base::TimeTicks reference_time,
396 base::TimeDelta timestamp, 400 base::TimeDelta timestamp,
397 int frame_feedback_id) { 401 int frame_feedback_id) {
398 std::unique_ptr<Buffer> buffer( 402 std::unique_ptr<Buffer> buffer(
399 ReserveOutputBuffer(format.frame_size, media::PIXEL_FORMAT_Y16, 403 ReserveOutputBuffer(frame_format.frame_size, media::PIXEL_FORMAT_Y16,
400 media::PIXEL_STORAGE_CPU, frame_feedback_id)); 404 media::PIXEL_STORAGE_CPU, frame_feedback_id));
401 // The input |length| can be greater than the required buffer size because of 405 // The input |length| can be greater than the required buffer size because of
402 // paddings and/or alignments, but it cannot be smaller. 406 // paddings and/or alignments, but it cannot be smaller.
403 DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize()); 407 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize());
404 #if DCHECK_IS_ON() 408 #if DCHECK_IS_ON()
405 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; 409 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1;
406 if (dropped_frame_counter_ >= kMaxDroppedFrames) 410 if (dropped_frame_counter_ >= kMaxDroppedFrames)
407 OnError(FROM_HERE, "Too many frames dropped"); 411 OnError(FROM_HERE, "Too many frames dropped");
408 #endif 412 #endif
409 // Failed to reserve output buffer, so drop the frame. 413 // Failed to reserve output buffer, so drop the frame.
410 if (!buffer.get()) 414 if (!buffer.get())
411 return; 415 return;
412 memcpy(buffer->data(), data, length); 416 memcpy(buffer->data(), data, length);
413 const VideoCaptureFormat output_format = 417 const VideoCaptureFormat output_format =
414 VideoCaptureFormat(format.frame_size, format.frame_rate, 418 VideoCaptureFormat(frame_format.frame_size, frame_format.frame_rate,
415 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); 419 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU);
416 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, 420 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time,
417 timestamp); 421 timestamp);
418 } 422 }
419 423
420 } // namespace media 424 } // namespace media
OLDNEW
« no previous file with comments | « media/capture/video/video_capture_device.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698