Chromium Code Reviews

Side by Side Diff: media/gpu/v4l2_video_decode_accelerator.cc

Issue 2191263002: V4L2VideoDecodeAccelerator: support external buffer import (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: address Pawel's comments in PS6 Created 4 years, 3 months ago
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/gpu/v4l2_video_decode_accelerator.h" 5 #include "media/gpu/v4l2_video_decode_accelerator.h"
6 6
7 #include <dlfcn.h> 7 #include <dlfcn.h>
8 #include <errno.h> 8 #include <errno.h>
9 #include <fcntl.h> 9 #include <fcntl.h>
10 #include <linux/videodev2.h> 10 #include <linux/videodev2.h>
11 #include <poll.h> 11 #include <poll.h>
12 #include <string.h> 12 #include <string.h>
13 #include <sys/eventfd.h> 13 #include <sys/eventfd.h>
14 #include <sys/ioctl.h> 14 #include <sys/ioctl.h>
15 #include <sys/mman.h> 15 #include <sys/mman.h>
16 16
17 #include "base/bind.h" 17 #include "base/bind.h"
18 #include "base/command_line.h" 18 #include "base/command_line.h"
19 #include "base/message_loop/message_loop.h" 19 #include "base/message_loop/message_loop.h"
20 #include "base/numerics/safe_conversions.h" 20 #include "base/numerics/safe_conversions.h"
21 #include "base/posix/eintr_wrapper.h"
21 #include "base/single_thread_task_runner.h" 22 #include "base/single_thread_task_runner.h"
22 #include "base/threading/thread_task_runner_handle.h" 23 #include "base/threading/thread_task_runner_handle.h"
23 #include "base/trace_event/trace_event.h" 24 #include "base/trace_event/trace_event.h"
24 #include "build/build_config.h" 25 #include "build/build_config.h"
25 #include "media/base/bind_to_current_loop.h" 26 #include "media/base/bind_to_current_loop.h"
26 #include "media/base/media_switches.h" 27 #include "media/base/media_switches.h"
27 #include "media/filters/h264_parser.h" 28 #include "media/filters/h264_parser.h"
28 #include "media/gpu/shared_memory_region.h" 29 #include "media/gpu/shared_memory_region.h"
29 #include "ui/gfx/geometry/rect.h" 30 #include "ui/gfx/geometry/rect.h"
30 #include "ui/gl/gl_context.h" 31 #include "ui/gl/gl_context.h"
(...skipping 100 matching lines...)
131 V4L2VideoDecodeAccelerator::InputRecord::InputRecord() 132 V4L2VideoDecodeAccelerator::InputRecord::InputRecord()
132 : at_device(false), address(NULL), length(0), bytes_used(0), input_id(-1) {} 133 : at_device(false), address(NULL), length(0), bytes_used(0), input_id(-1) {}
133 134
134 V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {} 135 V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {}
135 136
136 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord() 137 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord()
137 : state(kFree), 138 : state(kFree),
138 egl_image(EGL_NO_IMAGE_KHR), 139 egl_image(EGL_NO_IMAGE_KHR),
139 egl_sync(EGL_NO_SYNC_KHR), 140 egl_sync(EGL_NO_SYNC_KHR),
140 picture_id(-1), 141 picture_id(-1),
142 texture_id(0),
141 cleared(false) {} 143 cleared(false) {}
142 144
143 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {} 145 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {}
144 146
145 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(bool cleared, 147 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(bool cleared,
146 const Picture& picture) 148 const Picture& picture)
147 : cleared(cleared), picture(picture) {} 149 : cleared(cleared), picture(picture) {}
148 150
149 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {} 151 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {}
150 152
151 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator( 153 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator(
152 EGLDisplay egl_display, 154 EGLDisplay egl_display,
153 const GetGLContextCallback& get_gl_context_cb, 155 const GetGLContextCallback& get_gl_context_cb,
154 const MakeGLContextCurrentCallback& make_context_current_cb, 156 const MakeGLContextCurrentCallback& make_context_current_cb,
155 const scoped_refptr<V4L2Device>& device) 157 const scoped_refptr<V4L2Device>& device)
156 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()), 158 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
157 decoder_thread_("V4L2DecoderThread"), 159 decoder_thread_("V4L2DecoderThread"),
158 decoder_state_(kUninitialized), 160 decoder_state_(kUninitialized),
161 output_mode_(Config::OutputMode::ALLOCATE),
159 device_(device), 162 device_(device),
160 decoder_delay_bitstream_buffer_id_(-1), 163 decoder_delay_bitstream_buffer_id_(-1),
161 decoder_current_input_buffer_(-1), 164 decoder_current_input_buffer_(-1),
162 decoder_decode_buffer_tasks_scheduled_(0), 165 decoder_decode_buffer_tasks_scheduled_(0),
163 decoder_frames_at_client_(0), 166 decoder_frames_at_client_(0),
164 decoder_flushing_(false), 167 decoder_flushing_(false),
165 reset_pending_(false), 168 reset_pending_(false),
166 decoder_partial_frame_pending_(false), 169 decoder_partial_frame_pending_(false),
167 input_streamon_(false), 170 input_streamon_(false),
168 input_buffer_queued_count_(0), 171 input_buffer_queued_count_(0),
(...skipping 12 matching lines...)
181 egl_image_planes_count_(0), 184 egl_image_planes_count_(0),
182 weak_this_factory_(this) { 185 weak_this_factory_(this) {
183 weak_this_ = weak_this_factory_.GetWeakPtr(); 186 weak_this_ = weak_this_factory_.GetWeakPtr();
184 } 187 }
185 188
186 V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() { 189 V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() {
187 DCHECK(!decoder_thread_.IsRunning()); 190 DCHECK(!decoder_thread_.IsRunning());
188 DCHECK(!device_poll_thread_.IsRunning()); 191 DCHECK(!device_poll_thread_.IsRunning());
189 192
190 DestroyInputBuffers(); 193 DestroyInputBuffers();
194 DestroyEGLImages();
191 DestroyOutputBuffers(); 195 DestroyOutputBuffers();
192 196
193 // These maps have members that should be manually destroyed, e.g. file 197 // These maps have members that should be manually destroyed, e.g. file
194 // descriptors, mmap() segments, etc. 198 // descriptors, mmap() segments, etc.
195 DCHECK(input_buffer_map_.empty()); 199 DCHECK(input_buffer_map_.empty());
196 DCHECK(output_buffer_map_.empty()); 200 DCHECK(output_buffer_map_.empty());
197 } 201 }
198 202
199 bool V4L2VideoDecodeAccelerator::Initialize(const Config& config, 203 bool V4L2VideoDecodeAccelerator::Initialize(const Config& config,
200 Client* client) { 204 Client* client) {
201 DVLOGF(3) << "profile: " << config.profile; 205 DVLOGF(3) << "profile: " << config.profile;
202 DCHECK(child_task_runner_->BelongsToCurrentThread()); 206 DCHECK(child_task_runner_->BelongsToCurrentThread());
203 DCHECK_EQ(decoder_state_, kUninitialized); 207 DCHECK_EQ(decoder_state_, kUninitialized);
204 208
205 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( 209 if (!device_->SupportsDecodeProfileForV4L2PixelFormats(
206 config.profile, arraysize(supported_input_fourccs_), 210 config.profile, arraysize(supported_input_fourccs_),
207 supported_input_fourccs_)) { 211 supported_input_fourccs_)) {
208 DVLOGF(1) << "unsupported profile=" << config.profile; 212 DVLOGF(1) << "unsupported profile=" << config.profile;
209 return false; 213 return false;
210 } 214 }
211 215
212 if (config.is_encrypted) { 216 if (config.is_encrypted) {
213 NOTREACHED() << "Encrypted streams are not supported for this VDA"; 217 NOTREACHED() << "Encrypted streams are not supported for this VDA";
214 return false; 218 return false;
215 } 219 }
216 220
217 if (config.output_mode != Config::OutputMode::ALLOCATE) { 221 if (config.output_mode != Config::OutputMode::ALLOCATE &&
218 NOTREACHED() << "Only ALLOCATE OutputMode is supported by this VDA"; 222 config.output_mode != Config::OutputMode::IMPORT) {
223 NOTREACHED() << "Only ALLOCATE and IMPORT OutputModes are supported";
219 return false; 224 return false;
220 } 225 }
221 226
222 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) {
223 NOTREACHED() << "GL callbacks are required for this VDA";
224 return false;
225 }
226
227 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); 227 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
228 client_ = client_ptr_factory_->GetWeakPtr(); 228 client_ = client_ptr_factory_->GetWeakPtr();
229 // If we haven't been set up to decode on separate thread via 229 // If we haven't been set up to decode on separate thread via
230 // TryToSetupDecodeOnSeparateThread(), use the main thread/client for 230 // TryToSetupDecodeOnSeparateThread(), use the main thread/client for
231 // decode tasks. 231 // decode tasks.
232 if (!decode_task_runner_) { 232 if (!decode_task_runner_) {
233 decode_task_runner_ = child_task_runner_; 233 decode_task_runner_ = child_task_runner_;
234 DCHECK(!decode_client_); 234 DCHECK(!decode_client_);
235 decode_client_ = client_; 235 decode_client_ = client_;
236 } 236 }
237 237
238 video_profile_ = config.profile; 238 video_profile_ = config.profile;
239 239
240 if (egl_display_ == EGL_NO_DISPLAY) { 240 if (egl_display_ == EGL_NO_DISPLAY) {
241 LOGF(ERROR) << "could not get EGLDisplay"; 241 LOGF(ERROR) << "could not get EGLDisplay";
242 return false; 242 return false;
243 } 243 }
244 244
245 // We need the context to be initialized to query extensions. 245 // We need the context to be initialized to query extensions.
246 if (!make_context_current_cb_.Run()) { 246 if (!make_context_current_cb_.is_null()) {
247 LOGF(ERROR) << "could not make context current"; 247 if (!make_context_current_cb_.Run()) {
248 return false; 248 LOGF(ERROR) << "could not make context current";
249 } 249 return false;
250 }
250 251
251 // TODO(posciak): crbug.com/450898. 252 // TODO(posciak): crbug.com/450898.
252 #if defined(ARCH_CPU_ARMEL) 253 #if defined(ARCH_CPU_ARMEL)
253 if (!gl::g_driver_egl.ext.b_EGL_KHR_fence_sync) { 254 if (!gl::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
254 LOGF(ERROR) << "context does not have EGL_KHR_fence_sync"; 255 LOGF(ERROR) << "context does not have EGL_KHR_fence_sync";
255 return false; 256 return false;
257 }
258 #endif
259 } else {
260 DVLOGF(1) << "No GL callbacks provided, initializing without GL support";
256 } 261 }
257 #endif
258 262
259 // Capabilities check. 263 // Capabilities check.
260 struct v4l2_capability caps; 264 struct v4l2_capability caps;
261 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING; 265 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
262 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); 266 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
263 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { 267 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
264 LOGF(ERROR) << "ioctl() failed: VIDIOC_QUERYCAP" 268 LOGF(ERROR) << "ioctl() failed: VIDIOC_QUERYCAP"
265 << ", caps check failed: 0x" << std::hex << caps.capabilities; 269 << ", caps check failed: 0x" << std::hex << caps.capabilities;
266 return false; 270 return false;
267 } 271 }
(...skipping 13 matching lines...)
281 285
282 if (!CreateInputBuffers()) 286 if (!CreateInputBuffers())
283 return false; 287 return false;
284 288
285 if (!decoder_thread_.Start()) { 289 if (!decoder_thread_.Start()) {
286 LOGF(ERROR) << "decoder thread failed to start"; 290 LOGF(ERROR) << "decoder thread failed to start";
287 return false; 291 return false;
288 } 292 }
289 293
290 decoder_state_ = kInitialized; 294 decoder_state_ = kInitialized;
295 output_mode_ = config.output_mode;
291 296
292 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here. 297 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here.
293 decoder_thread_.task_runner()->PostTask( 298 decoder_thread_.task_runner()->PostTask(
294 FROM_HERE, base::Bind(base::IgnoreResult( 299 FROM_HERE, base::Bind(base::IgnoreResult(
295 &V4L2VideoDecodeAccelerator::StartDevicePoll), 300 &V4L2VideoDecodeAccelerator::StartDevicePoll),
296 base::Unretained(this))); 301 base::Unretained(this)));
297 302
298 return true; 303 return true;
299 } 304 }
300 305
(...skipping 51 matching lines...)
352 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 357 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
353 reqbufs.memory = V4L2_MEMORY_MMAP; 358 reqbufs.memory = V4L2_MEMORY_MMAP;
354 IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs); 359 IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs);
355 360
356 if (reqbufs.count != buffers.size()) { 361 if (reqbufs.count != buffers.size()) {
357 DLOGF(ERROR) << "Could not allocate enough output buffers"; 362 DLOGF(ERROR) << "Could not allocate enough output buffers";
358 NOTIFY_ERROR(PLATFORM_FAILURE); 363 NOTIFY_ERROR(PLATFORM_FAILURE);
359 return; 364 return;
360 } 365 }
361 366
362 if (image_processor_device_) { 367 DCHECK(free_output_buffers_.empty());
363 DCHECK(!image_processor_); 368 DCHECK(output_buffer_map_.empty());
364 image_processor_.reset(new V4L2ImageProcessor(image_processor_device_)); 369 output_buffer_map_.resize(buffers.size());
365 // Unretained is safe because |this| owns image processor and there will be 370 if (image_processor_device_ && output_mode_ == Config::OutputMode::ALLOCATE) {
366 // no callbacks after processor destroys. 371 if (!CreateImageProcessor())
367 if (!image_processor_->Initialize(
368 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
369 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_),
370 V4L2_MEMORY_DMABUF, visible_size_, coded_size_, visible_size_,
371 visible_size_, buffers.size(),
372 base::Bind(&V4L2VideoDecodeAccelerator::ImageProcessorError,
373 base::Unretained(this)))) {
374 LOGF(ERROR) << "Initialize image processor failed";
375 NOTIFY_ERROR(PLATFORM_FAILURE);
376 return; 372 return;
377 }
378 DCHECK(image_processor_->output_allocated_size() == egl_image_size_);
379 if (image_processor_->input_allocated_size() != coded_size_) {
380 LOGF(ERROR) << "Image processor should be able to take the output coded "
381 << "size of decoder " << coded_size_.ToString()
382 << " without adjusting to "
383 << image_processor_->input_allocated_size().ToString();
384 NOTIFY_ERROR(PLATFORM_FAILURE);
385 return;
386 }
387 } 373 }
388 374
389 child_task_runner_->PostTask( 375 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
390 FROM_HERE, 376 DCHECK(buffers[i].size() == egl_image_size_);
391 base::Bind(&V4L2VideoDecodeAccelerator::CreateEGLImages, weak_this_, 377
392 buffers, egl_image_format_fourcc_, egl_image_planes_count_)); 378 OutputRecord& output_record = output_buffer_map_[i];
379 DCHECK_EQ(output_record.state, kFree);
380 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
381 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
382 DCHECK_EQ(output_record.picture_id, -1);
383 DCHECK_EQ(output_record.cleared, false);
384 DCHECK_EQ(1u, buffers[i].texture_ids().size());
385 DCHECK(output_record.processor_input_fds.empty());
386
387 output_record.picture_id = buffers[i].id();
388 output_record.texture_id = buffers[i].texture_ids()[0];
389 // This will remain kAtClient until ImportBufferForPicture is called, either
390 // by the client, or by ourselves, if we are allocating.
391 output_record.state = kAtClient;
392
393 if (image_processor_device_) {
394 std::vector<base::ScopedFD> dmabuf_fds = device_->GetDmabufsForV4L2Buffer(
395 i, output_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
396 if (dmabuf_fds.empty()) {
397 LOGF(ERROR) << "Failed to get DMABUFs of decoder.";
398 NOTIFY_ERROR(PLATFORM_FAILURE);
399 return;
400 }
401 output_record.processor_input_fds = std::move(dmabuf_fds);
402 }
403
404 if (output_mode_ == Config::OutputMode::ALLOCATE) {
405 std::vector<base::ScopedFD> dmabuf_fds;
406 dmabuf_fds = egl_image_device_->GetDmabufsForV4L2Buffer(
407 i, egl_image_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
408 if (dmabuf_fds.empty()) {
409 LOGF(ERROR) << "Failed to get DMABUFs for EGLImage.";
410 NOTIFY_ERROR(PLATFORM_FAILURE);
411 return;
412 }
413 ImportBufferForPictureTask(output_record.picture_id,
414 std::move(dmabuf_fds),
415 egl_image_size_.width());
416 } // else we'll get triggered via ImportBufferForPicture() from client.
417
418 DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id;
419 }
393 } 420 }
394 421
395 void V4L2VideoDecodeAccelerator::CreateEGLImages( 422 void V4L2VideoDecodeAccelerator::CreateEGLImageFor(
396 const std::vector<media::PictureBuffer>& buffers, 423 size_t buffer_index,
397 uint32_t output_format_fourcc, 424 int32_t picture_buffer_id,
398 size_t output_planes_count) { 425 std::vector<base::ScopedFD> dmabuf_fds,
399 DVLOGF(3); 426 GLuint texture_id,
427 const gfx::Size& size,
428 uint32_t fourcc) {
429 DVLOGF(3) << "index=" << buffer_index;
400 DCHECK(child_task_runner_->BelongsToCurrentThread()); 430 DCHECK(child_task_runner_->BelongsToCurrentThread());
401 431
402 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) { 432 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) {
403 DLOGF(ERROR) << "GL callbacks required for binding to EGLImages"; 433 DLOGF(ERROR) << "GL callbacks required for binding to EGLImages";
404 NOTIFY_ERROR(INVALID_ARGUMENT); 434 NOTIFY_ERROR(INVALID_ARGUMENT);
405 return; 435 return;
406 } 436 }
407 437
408 gl::GLContext* gl_context = get_gl_context_cb_.Run(); 438 gl::GLContext* gl_context = get_gl_context_cb_.Run();
409 if (!gl_context || !make_context_current_cb_.Run()) { 439 if (!gl_context || !make_context_current_cb_.Run()) {
410 DLOGF(ERROR) << "No GL context"; 440 DLOGF(ERROR) << "No GL context";
411 NOTIFY_ERROR(PLATFORM_FAILURE); 441 NOTIFY_ERROR(PLATFORM_FAILURE);
412 return; 442 return;
413 } 443 }
414 444
415 gl::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); 445 gl::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);
416 446
417 std::vector<EGLImageKHR> egl_images; 447 EGLImageKHR egl_image = egl_image_device_->CreateEGLImage(
418 for (size_t i = 0; i < buffers.size(); ++i) { 448 egl_display_, gl_context->GetHandle(), texture_id, size, buffer_index,
419 std::vector<base::ScopedFD> dmabuf_fds; 449 fourcc, dmabuf_fds);
420 dmabuf_fds = egl_image_device_->GetDmabufsForV4L2Buffer( 450 if (egl_image == EGL_NO_IMAGE_KHR) {
421 i, egl_image_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); 451 LOGF(ERROR) << "could not create EGLImageKHR,"
422 if (dmabuf_fds.empty()) { 452 << " index=" << buffer_index << " texture_id=" << texture_id;
423 LOGF(ERROR) << "Failed to get DMABUFs for EGLImage."; 453 NOTIFY_ERROR(PLATFORM_FAILURE);
424 NOTIFY_ERROR(PLATFORM_FAILURE); 454 return;
425 return;
426 }
427
428 EGLImageKHR egl_image = egl_image_device_->CreateEGLImage(
429 egl_display_, gl_context->GetHandle(), buffers[i].texture_ids()[0],
430 buffers[i].size(), i, egl_image_format_fourcc_, dmabuf_fds);
431 if (egl_image == EGL_NO_IMAGE_KHR) {
432 LOGF(ERROR) << "could not create EGLImageKHR,"
433 << " index=" << i
434 << " texture_id=" << buffers[i].texture_ids()[0];
435 for (EGLImageKHR image : egl_images) {
436 if (egl_image_device_->DestroyEGLImage(egl_display_, image) != EGL_TRUE)
437 DVLOGF(1) << "DestroyEGLImage failed.";
438 }
439 NOTIFY_ERROR(PLATFORM_FAILURE);
440 return;
441 }
442 egl_images.push_back(egl_image);
443 } 455 }
444 456
445 decoder_thread_.task_runner()->PostTask( 457 decoder_thread_.task_runner()->PostTask(
446 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::AssignEGLImages, 458 FROM_HERE,
447 base::Unretained(this), buffers, egl_images)); 459 base::Bind(&V4L2VideoDecodeAccelerator::AssignEGLImage,
460 base::Unretained(this), buffer_index, picture_buffer_id,
461 egl_image, base::Passed(&dmabuf_fds)));
448 } 462 }
449 463
450 void V4L2VideoDecodeAccelerator::AssignEGLImages( 464 void V4L2VideoDecodeAccelerator::AssignEGLImage(
451 const std::vector<media::PictureBuffer>& buffers, 465 size_t buffer_index,
452 const std::vector<EGLImageKHR>& egl_images) { 466 int32_t picture_buffer_id,
453 DVLOGF(3); 467 EGLImageKHR egl_image,
468 std::vector<base::ScopedFD> dmabuf_fds) {
469 DVLOGF(3) << "index=" << buffer_index << ", picture_id=" << picture_buffer_id;
454 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 470 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
455 DCHECK_EQ(buffers.size(), egl_images.size());
456 DCHECK(free_output_buffers_.empty());
457 DCHECK(output_buffer_map_.empty());
458 471
459 output_buffer_map_.resize(buffers.size()); 472 // It's possible that while waiting for the EGLImages to be allocated and
460 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 473 // assigned, we have already decoded more of the stream and saw another
461 DCHECK(buffers[i].size() == egl_image_size_); 474 // resolution change. This is a normal situation, in such a case either there
462 475 // is no output record with this index awaiting an EGLImage to be assigned to
463 OutputRecord& output_record = output_buffer_map_[i]; 476 // it, or the record is already updated to use a newer PictureBuffer and is
464 DCHECK_EQ(output_record.state, kFree); 477 // awaiting an EGLImage associated with a different picture_buffer_id. If so,
465 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR); 478 // just discard this image, we will get the one we are waiting for later.
466 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); 479 if (buffer_index >= output_buffer_map_.size() ||
467 DCHECK_EQ(output_record.picture_id, -1); 480 output_buffer_map_[buffer_index].picture_id != picture_buffer_id) {
468 DCHECK_EQ(output_record.cleared, false); 481 DVLOGF(3) << "Picture set already changed, dropping EGLImage";
469 DCHECK_LE(1u, buffers[i].texture_ids().size()); 482 child_task_runner_->PostTask(
470 483 FROM_HERE, base::Bind(base::IgnoreResult(&V4L2Device::DestroyEGLImage),
471 if (image_processor_device_) { 484 device_, egl_display_, egl_image));
472 std::vector<base::ScopedFD> fds = device_->GetDmabufsForV4L2Buffer( 485 return;
473 i, output_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
474 if (fds.empty()) {
475 LOGF(ERROR) << "Failed to get DMABUFs of decoder.";
476 NOTIFY_ERROR(PLATFORM_FAILURE);
477 return;
478 }
479 output_record.fds = std::move(fds);
480 }
481
482 output_record.egl_image = egl_images[i];
483 output_record.picture_id = buffers[i].id();
484
485 free_output_buffers_.push(i);
486 DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id;
487 } 486 }
488 487
489 decoder_state_ = kDecoding; 488 OutputRecord& output_record = output_buffer_map_[buffer_index];
490 Enqueue(); 489 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
490 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
491 DCHECK_EQ(output_record.state, kFree);
492 DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
493 buffer_index),
494 0);
495 output_record.egl_image = egl_image;
496 free_output_buffers_.push_back(buffer_index);
497 if (decoder_state_ == kAwaitingPictureBuffers) {
498 DVLOGF(1) << "Change state to kDecoding";
499 decoder_state_ = kDecoding;
500 }
491 if (reset_pending_) { 501 if (reset_pending_) {
492 FinishReset(); 502 FinishReset();
493 return; 503 return;
494 } 504 }
505 if (decoder_state_ != kChangingResolution) {
506 Enqueue();
507 ScheduleDecodeBufferTaskIfNeeded();
508 }
509 }
495 510
496 ScheduleDecodeBufferTaskIfNeeded(); 511 void V4L2VideoDecodeAccelerator::ImportBufferForPicture(
512 int32_t picture_buffer_id,
513 const gfx::GpuMemoryBufferHandle& gpu_memory_buffer_handle) {
514 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
515 DCHECK(child_task_runner_->BelongsToCurrentThread());
516
517 if (output_mode_ != Config::OutputMode::IMPORT) {
518 LOGF(ERROR) << "Cannot import in non-import mode";
519 NOTIFY_ERROR(INVALID_ARGUMENT);
520 return;
521 }
522
523 std::vector<base::ScopedFD> dmabuf_fds;
524 int32_t stride = 0;
525 #if defined(USE_OZONE)
526 for (const auto& fd : gpu_memory_buffer_handle.native_pixmap_handle.fds) {
527 DCHECK_NE(fd.fd, -1);
528 dmabuf_fds.push_back(base::ScopedFD(fd.fd));
529 }
530 stride = gpu_memory_buffer_handle.native_pixmap_handle.planes[0].stride;
531 for (const auto& plane :
532 gpu_memory_buffer_handle.native_pixmap_handle.planes) {
533 DVLOGF(3) << ": offset=" << plane.offset << ", stride=" << plane.stride;
534 }
535 #endif
536
537 decoder_thread_.message_loop()->PostTask(
538 FROM_HERE,
539 base::Bind(&V4L2VideoDecodeAccelerator::ImportBufferForPictureTask,
540 base::Unretained(this), picture_buffer_id,
541 base::Passed(&dmabuf_fds), stride));
542 }
543
544 void V4L2VideoDecodeAccelerator::ImportBufferForPictureTask(
545 int32_t picture_buffer_id,
546 std::vector<base::ScopedFD> dmabuf_fds,
547 int32_t stride) {
548 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id
549 << ", dmabuf_fds.size()=" << dmabuf_fds.size()
550 << ", stride=" << stride;
551 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
552 DCHECK(image_processor_device_);
553
554 int plane_horiz_bits_per_pixel = VideoFrame::PlaneHorizontalBitsPerPixel(
555 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_), 0);
Pawel Osciak 2016/09/14 04:17:29 We should probably verify plane_horiz_bits_per_pixel is not zero before dividing by it.
wuchengli 2016/09/14 05:17:27 Done.
556 int adjusted_coded_width = stride * 8 / plane_horiz_bits_per_pixel;
Pawel Osciak 2016/09/14 04:17:29 Do we need to align up or error out in case (stride * 8) is not a multiple of plane_horiz_bits_per_pixel?
wuchengli 2016/09/14 05:17:27 Done.
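For illustration, a minimal sketch of the stride validation the two comments above ask for, assuming stride is given in bytes and plane_horiz_bits_per_pixel comes from VideoFrame::PlaneHorizontalBitsPerPixel(); the helper name and placement are hypothetical and this is not necessarily the code that landed in the next patch set:

  // Hypothetical helper, for illustration only. Rejects strides that would
  // make the adjusted_coded_width computation below divide by zero or drop
  // fractional pixels.
  static bool StrideIsValidForFormat(int32_t stride,
                                     int plane_horiz_bits_per_pixel) {
    if (plane_horiz_bits_per_pixel <= 0)
      return false;  // Unknown/invalid format; dividing by this is undefined.
    if ((stride * 8) % plane_horiz_bits_per_pixel != 0)
      return false;  // Stride is not a whole number of pixels; the caller
                     // should align up or report an error.
    return true;
  }

With such a check, ImportBufferForPictureTask() could NOTIFY_ERROR(INVALID_ARGUMENT) and return early rather than computing adjusted_coded_width from a bad stride.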
557 if (!image_processor_) {
558 // The client may adjust the coded width. We don't have the final coded size
559 // in AssignPictureBuffers yet. Use the adjusted coded width to create the
560 // image processor.
561 DVLOGF(3) << "Original egl_image_size=" << egl_image_size_.ToString()
562 << ", adjusted coded width=" << adjusted_coded_width;
563 egl_image_size_.set_width(adjusted_coded_width);
564 if (!CreateImageProcessor())
565 return;
566 } else {
567 DCHECK_EQ(egl_image_size_.width(), adjusted_coded_width);
568 }
569
570 const auto iter =
571 std::find_if(output_buffer_map_.begin(), output_buffer_map_.end(),
572 [picture_buffer_id](const OutputRecord& output_record) {
573 return output_record.picture_id == picture_buffer_id;
574 });
575 if (iter == output_buffer_map_.end()) {
576 // It's possible that we've already posted a DismissPictureBuffer for this
577 // picture, but it has not yet executed when this ImportBufferForPicture was
578 // posted to us by the client. In that case just ignore this (we've already
579 // dismissed it and accounted for that).
580 DVLOGF(3) << "got picture id=" << picture_buffer_id
581 << " not in use (anymore?).";
582 return;
583 }
584
585 if (iter->state != kAtClient) {
586 LOGF(ERROR) << "Cannot import buffer not owned by client";
587 NOTIFY_ERROR(INVALID_ARGUMENT);
588 return;
589 }
590
591 size_t index = iter - output_buffer_map_.begin();
592 DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
593 index),
594 0);
595
596 iter->state = kFree;
597 if (iter->texture_id != 0) {
598 if (iter->egl_image != EGL_NO_IMAGE_KHR) {
599 child_task_runner_->PostTask(
600 FROM_HERE,
601 base::Bind(base::IgnoreResult(&V4L2Device::DestroyEGLImage), device_,
602 egl_display_, iter->egl_image));
603 }
604
605 child_task_runner_->PostTask(
606 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::CreateEGLImageFor,
607 weak_this_, index, picture_buffer_id,
608 base::Passed(&dmabuf_fds), iter->texture_id,
609 egl_image_size_, egl_image_format_fourcc_));
610 } else {
611 // No need for an EGLImage, start using this buffer now.
612 DCHECK_EQ(egl_image_planes_count_, dmabuf_fds.size());
613 iter->processor_output_fds.swap(dmabuf_fds);
614 free_output_buffers_.push_back(index);
615 if (decoder_state_ == kAwaitingPictureBuffers) {
616 DVLOGF(1) << "Change state to kDecoding";
617 decoder_state_ = kDecoding;
618 }
619 if (decoder_state_ != kChangingResolution) {
620 Enqueue();
621 ScheduleDecodeBufferTaskIfNeeded();
622 }
623 }
497 } 624 }
498 625
499 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) { 626 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) {
500 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; 627 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
501 // Must be run on child thread, as we'll insert a sync in the EGL context. 628 // Must be run on child thread, as we'll insert a sync in the EGL context.
502 DCHECK(child_task_runner_->BelongsToCurrentThread()); 629 DCHECK(child_task_runner_->BelongsToCurrentThread());
503 630
504 if (!make_context_current_cb_.Run()) { 631 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref;
505 LOGF(ERROR) << "could not make context current";
506 NOTIFY_ERROR(PLATFORM_FAILURE);
507 return;
508 }
509 632
510 EGLSyncKHR egl_sync = EGL_NO_SYNC_KHR; 633 if (!make_context_current_cb_.is_null()) {
634 if (!make_context_current_cb_.Run()) {
635 LOGF(ERROR) << "could not make context current";
636 NOTIFY_ERROR(PLATFORM_FAILURE);
637 return;
638 }
639
640 EGLSyncKHR egl_sync = EGL_NO_SYNC_KHR;
511 // TODO(posciak): crbug.com/450898. 641 // TODO(posciak): crbug.com/450898.
512 #if defined(ARCH_CPU_ARMEL) 642 #if defined(ARCH_CPU_ARMEL)
513 egl_sync = eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); 643 egl_sync = eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
514 if (egl_sync == EGL_NO_SYNC_KHR) { 644 if (egl_sync == EGL_NO_SYNC_KHR) {
515 LOGF(ERROR) << "eglCreateSyncKHR() failed"; 645 LOGF(ERROR) << "eglCreateSyncKHR() failed";
516 NOTIFY_ERROR(PLATFORM_FAILURE); 646 NOTIFY_ERROR(PLATFORM_FAILURE);
517 return; 647 return;
518 } 648 }
519 #endif 649 #endif
520 650
521 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref( 651 egl_sync_ref.reset(new EGLSyncKHRRef(egl_display_, egl_sync));
522 new EGLSyncKHRRef(egl_display_, egl_sync)); 652 }
523 653
524 decoder_thread_.task_runner()->PostTask( 654 decoder_thread_.task_runner()->PostTask(
525 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ReusePictureBufferTask, 655 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ReusePictureBufferTask,
526 base::Unretained(this), picture_buffer_id, 656 base::Unretained(this), picture_buffer_id,
527 base::Passed(&egl_sync_ref))); 657 base::Passed(&egl_sync_ref)));
528 } 658 }
529 659
530 void V4L2VideoDecodeAccelerator::Flush() { 660 void V4L2VideoDecodeAccelerator::Flush() {
531 DVLOGF(3); 661 DVLOGF(3);
532 DCHECK(child_task_runner_->BelongsToCurrentThread()); 662 DCHECK(child_task_runner_->BelongsToCurrentThread());
(...skipping 663 matching lines...)
1196 if (errno == EAGAIN) { 1326 if (errno == EAGAIN) {
1197 // EAGAIN if we're just out of buffers to dequeue. 1327 // EAGAIN if we're just out of buffers to dequeue.
1198 break; 1328 break;
1199 } 1329 }
1200 PLOGF(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; 1330 PLOGF(ERROR) << "ioctl() failed: VIDIOC_DQBUF";
1201 NOTIFY_ERROR(PLATFORM_FAILURE); 1331 NOTIFY_ERROR(PLATFORM_FAILURE);
1202 return; 1332 return;
1203 } 1333 }
1204 OutputRecord& output_record = output_buffer_map_[dqbuf.index]; 1334 OutputRecord& output_record = output_buffer_map_[dqbuf.index];
1205 DCHECK_EQ(output_record.state, kAtDevice); 1335 DCHECK_EQ(output_record.state, kAtDevice);
1206 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1207 DCHECK_NE(output_record.picture_id, -1); 1336 DCHECK_NE(output_record.picture_id, -1);
1208 output_buffer_queued_count_--; 1337 output_buffer_queued_count_--;
1209 if (dqbuf.m.planes[0].bytesused == 0) { 1338 if (dqbuf.m.planes[0].bytesused == 0) {
1210 // This is an empty output buffer returned as part of a flush. 1339 // This is an empty output buffer returned as part of a flush.
1211 output_record.state = kFree; 1340 output_record.state = kFree;
1212 free_output_buffers_.push(dqbuf.index); 1341 free_output_buffers_.push_back(dqbuf.index);
1213 } else { 1342 } else {
1214 int32_t bitstream_buffer_id = dqbuf.timestamp.tv_sec; 1343 int32_t bitstream_buffer_id = dqbuf.timestamp.tv_sec;
1215 DCHECK_GE(bitstream_buffer_id, 0); 1344 DCHECK_GE(bitstream_buffer_id, 0);
1216 DVLOGF(3) << "Dequeue output buffer: dqbuf index=" << dqbuf.index 1345 DVLOGF(3) << "Dequeue output buffer: dqbuf index=" << dqbuf.index
1217 << " bitstream input_id=" << bitstream_buffer_id; 1346 << " bitstream input_id=" << bitstream_buffer_id;
1218 if (image_processor_device_) { 1347 if (image_processor_device_) {
1219 output_record.state = kAtProcessor; 1348 if (!ProcessFrame(bitstream_buffer_id, dqbuf.index)) {
1220 image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id); 1349 DLOGF(ERROR) << "Processing frame failed";
1221 std::vector<int> fds; 1350 NOTIFY_ERROR(PLATFORM_FAILURE);
1222 for (auto& fd : output_record.fds) { 1351 return;
1223 fds.push_back(fd.get());
1224 } 1352 }
1225 scoped_refptr<VideoFrame> frame = VideoFrame::WrapExternalDmabufs(
1226 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
1227 coded_size_, gfx::Rect(visible_size_), visible_size_, fds,
1228 base::TimeDelta());
1229 // Unretained is safe because |this| owns image processor and there will
1230 // be no callbacks after processor destroys. Also, this class ensures it
1231 // is safe to post a task from child thread to decoder thread using
1232 // Unretained.
1233 image_processor_->Process(
1234 frame, dqbuf.index,
1235 BindToCurrentLoop(
1236 base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed,
1237 base::Unretained(this), bitstream_buffer_id)));
1238 } else { 1353 } else {
1239 output_record.state = kAtClient; 1354 output_record.state = kAtClient;
1240 decoder_frames_at_client_++; 1355 decoder_frames_at_client_++;
1241 const Picture picture(output_record.picture_id, bitstream_buffer_id, 1356 const Picture picture(output_record.picture_id, bitstream_buffer_id,
1242 gfx::Rect(visible_size_), false); 1357 gfx::Rect(visible_size_), false);
1243 pending_picture_ready_.push( 1358 pending_picture_ready_.push(
1244 PictureRecord(output_record.cleared, picture)); 1359 PictureRecord(output_record.cleared, picture));
1245 SendPictureReady(); 1360 SendPictureReady();
1246 output_record.cleared = true; 1361 output_record.cleared = true;
1247 } 1362 }
(...skipping 32 matching lines...)
1280 } 1395 }
1281 1396
1282 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() { 1397 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
1283 DCHECK(!free_output_buffers_.empty()); 1398 DCHECK(!free_output_buffers_.empty());
1284 1399
1285 // Enqueue an output (VIDEO_CAPTURE) buffer. 1400 // Enqueue an output (VIDEO_CAPTURE) buffer.
1286 const int buffer = free_output_buffers_.front(); 1401 const int buffer = free_output_buffers_.front();
1287 DVLOGF(3) << "buffer " << buffer; 1402 DVLOGF(3) << "buffer " << buffer;
1288 OutputRecord& output_record = output_buffer_map_[buffer]; 1403 OutputRecord& output_record = output_buffer_map_[buffer];
1289 DCHECK_EQ(output_record.state, kFree); 1404 DCHECK_EQ(output_record.state, kFree);
1290 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1291 DCHECK_NE(output_record.picture_id, -1); 1405 DCHECK_NE(output_record.picture_id, -1);
1292 if (output_record.egl_sync != EGL_NO_SYNC_KHR) { 1406 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1293 TRACE_EVENT0("Video Decoder", 1407 TRACE_EVENT0("Video Decoder",
1294 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR"); 1408 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR");
1295 // If we have to wait for completion, wait. Note that 1409 // If we have to wait for completion, wait. Note that
1296 // free_output_buffers_ is a FIFO queue, so we always wait on the 1410 // free_output_buffers_ is a FIFO queue, so we always wait on the
1297 // buffer that has been in the queue the longest. 1411 // buffer that has been in the queue the longest.
1298 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0, 1412 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
1299 EGL_FOREVER_KHR) == EGL_FALSE) { 1413 EGL_FOREVER_KHR) == EGL_FALSE) {
1300 // This will cause tearing, but is safe otherwise. 1414 // This will cause tearing, but is safe otherwise.
(...skipping 10 matching lines...)
1311 std::unique_ptr<struct v4l2_plane[]> qbuf_planes( 1425 std::unique_ptr<struct v4l2_plane[]> qbuf_planes(
1312 new v4l2_plane[output_planes_count_]); 1426 new v4l2_plane[output_planes_count_]);
1313 memset(&qbuf, 0, sizeof(qbuf)); 1427 memset(&qbuf, 0, sizeof(qbuf));
1314 memset(qbuf_planes.get(), 0, 1428 memset(qbuf_planes.get(), 0,
1315 sizeof(struct v4l2_plane) * output_planes_count_); 1429 sizeof(struct v4l2_plane) * output_planes_count_);
1316 qbuf.index = buffer; 1430 qbuf.index = buffer;
1317 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 1431 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1318 qbuf.memory = V4L2_MEMORY_MMAP; 1432 qbuf.memory = V4L2_MEMORY_MMAP;
1319 qbuf.m.planes = qbuf_planes.get(); 1433 qbuf.m.planes = qbuf_planes.get();
1320 qbuf.length = output_planes_count_; 1434 qbuf.length = output_planes_count_;
1435 DVLOGF(2) << "qbuf.index=" << qbuf.index
1436 << ", output_mode_=" << static_cast<int>(output_mode_)
1437 << ", output_planes_count_=" << output_planes_count_;
1321 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 1438 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1322 free_output_buffers_.pop(); 1439 free_output_buffers_.pop_front();
1323 output_record.state = kAtDevice; 1440 output_record.state = kAtDevice;
1324 output_buffer_queued_count_++; 1441 output_buffer_queued_count_++;
1325 return true; 1442 return true;
1326 } 1443 }
1327 1444
1328 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask( 1445 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask(
1329 int32_t picture_buffer_id, 1446 int32_t picture_buffer_id,
1330 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref) { 1447 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref) {
1331 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; 1448 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
1332 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1449 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
(...skipping 27 matching lines...)
1360 } 1477 }
1361 1478
1362 OutputRecord& output_record = output_buffer_map_[index]; 1479 OutputRecord& output_record = output_buffer_map_[index];
1363 if (output_record.state != kAtClient) { 1480 if (output_record.state != kAtClient) {
1364 LOGF(ERROR) << "picture_buffer_id not reusable"; 1481 LOGF(ERROR) << "picture_buffer_id not reusable";
1365 NOTIFY_ERROR(INVALID_ARGUMENT); 1482 NOTIFY_ERROR(INVALID_ARGUMENT);
1366 return; 1483 return;
1367 } 1484 }
1368 1485
1369 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); 1486 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1370 output_record.egl_sync = egl_sync_ref->egl_sync;
1371 output_record.state = kFree; 1487 output_record.state = kFree;
1372 free_output_buffers_.push(index); 1488 free_output_buffers_.push_back(index);
1373 decoder_frames_at_client_--; 1489 decoder_frames_at_client_--;
1374 // Take ownership of the EGLSync. 1490 if (egl_sync_ref) {
1375 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR; 1491 output_record.egl_sync = egl_sync_ref->egl_sync;
1492 // Take ownership of the EGLSync.
1493 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1494 }
1376 // We got a buffer back, so enqueue it back. 1495 // We got a buffer back, so enqueue it back.
1377 Enqueue(); 1496 Enqueue();
1378 } 1497 }
1379 1498
1380 void V4L2VideoDecodeAccelerator::FlushTask() { 1499 void V4L2VideoDecodeAccelerator::FlushTask() {
1381 DVLOGF(3); 1500 DVLOGF(3);
1382 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1501 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1383 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask"); 1502 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask");
1384 1503
1385 // Flush outstanding buffers. 1504 // Flush outstanding buffers.
(...skipping 245 matching lines...)
1631 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); 1750 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1632 output_streamon_ = false; 1751 output_streamon_ = false;
1633 1752
1634 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 1753 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1635 // After streamoff, the device drops ownership of all buffers, even if we 1754 // After streamoff, the device drops ownership of all buffers, even if we
1636 // don't dequeue them explicitly. Some of them may still be owned by the 1755 // don't dequeue them explicitly. Some of them may still be owned by the
1637 // client however. Reuse only those that aren't. 1756 // client however. Reuse only those that aren't.
1638 OutputRecord& output_record = output_buffer_map_[i]; 1757 OutputRecord& output_record = output_buffer_map_[i];
1639 if (output_record.state == kAtDevice) { 1758 if (output_record.state == kAtDevice) {
1640 output_record.state = kFree; 1759 output_record.state = kFree;
1641 free_output_buffers_.push(i); 1760 free_output_buffers_.push_back(i);
1642 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); 1761 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1643 } 1762 }
1644 } 1763 }
1645 output_buffer_queued_count_ = 0; 1764 output_buffer_queued_count_ = 0;
1646 return true; 1765 return true;
1647 } 1766 }
1648 1767
1649 bool V4L2VideoDecodeAccelerator::StopInputStream() { 1768 bool V4L2VideoDecodeAccelerator::StopInputStream() {
1650 DVLOGF(3); 1769 DVLOGF(3);
1651 if (!input_streamon_) 1770 if (!input_streamon_)
(...skipping 49 matching lines...)
1701 void V4L2VideoDecodeAccelerator::FinishResolutionChange() { 1820 void V4L2VideoDecodeAccelerator::FinishResolutionChange() {
1702 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1821 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1703 DCHECK_EQ(decoder_state_, kChangingResolution); 1822 DCHECK_EQ(decoder_state_, kChangingResolution);
1704 DVLOGF(3); 1823 DVLOGF(3);
1705 1824
1706 if (decoder_state_ == kError) { 1825 if (decoder_state_ == kError) {
1707 DVLOGF(2) << "early out: kError state"; 1826 DVLOGF(2) << "early out: kError state";
1708 return; 1827 return;
1709 } 1828 }
1710 1829
1830 DestroyOutputBuffers();
1831
1711 struct v4l2_format format; 1832 struct v4l2_format format;
1712 bool again; 1833 bool again;
1713 gfx::Size visible_size; 1834 gfx::Size visible_size;
1714 bool ret = GetFormatInfo(&format, &visible_size, &again); 1835 bool ret = GetFormatInfo(&format, &visible_size, &again);
1715 if (!ret || again) { 1836 if (!ret || again) {
1716 LOGF(ERROR) << "Couldn't get format information after resolution change"; 1837 LOGF(ERROR) << "Couldn't get format information after resolution change";
1717 NOTIFY_ERROR(PLATFORM_FAILURE); 1838 NOTIFY_ERROR(PLATFORM_FAILURE);
1718 return; 1839 return;
1719 } 1840 }
1720 1841
(...skipping 109 matching lines...)
1830 &egl_image_planes_count_)) { 1951 &egl_image_planes_count_)) {
1831 LOGF(ERROR) << "Fail to get output size and plane count of processor"; 1952 LOGF(ERROR) << "Fail to get output size and plane count of processor";
1832 return false; 1953 return false;
1833 } 1954 }
1834 } else { 1955 } else {
1835 egl_image_size_ = coded_size_; 1956 egl_image_size_ = coded_size_;
1836 egl_image_planes_count_ = output_planes_count_; 1957 egl_image_planes_count_ = output_planes_count_;
1837 } 1958 }
1838 DVLOGF(3) << "new resolution: " << coded_size_.ToString() 1959 DVLOGF(3) << "new resolution: " << coded_size_.ToString()
1839 << ", visible size: " << visible_size_.ToString() 1960 << ", visible size: " << visible_size_.ToString()
1840 << ", EGLImage size: " << egl_image_size_.ToString(); 1961 << ", EGLImage size: " << egl_image_size_.ToString()
1962 << ", plane count: " << egl_image_planes_count_;
1841 1963
1842 return CreateOutputBuffers(); 1964 return CreateOutputBuffers();
1843 } 1965 }
1844 1966
1845 gfx::Size V4L2VideoDecodeAccelerator::GetVisibleSize( 1967 gfx::Size V4L2VideoDecodeAccelerator::GetVisibleSize(
1846 const gfx::Size& coded_size) { 1968 const gfx::Size& coded_size) {
1847 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1969 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1848 1970
1849 struct v4l2_crop crop_arg; 1971 struct v4l2_crop crop_arg;
1850 memset(&crop_arg, 0, sizeof(crop_arg)); 1972 memset(&crop_arg, 0, sizeof(crop_arg));
(...skipping 145 matching lines...)
1996 LOGF(ERROR) << "Can't find a usable input format from image processor"; 2118 LOGF(ERROR) << "Can't find a usable input format from image processor";
1997 return false; 2119 return false;
1998 } 2120 }
1999 egl_image_format_fourcc_ = FindImageProcessorOutputFormat(); 2121 egl_image_format_fourcc_ = FindImageProcessorOutputFormat();
2000 if (egl_image_format_fourcc_ == 0) { 2122 if (egl_image_format_fourcc_ == 0) {
2001 LOGF(ERROR) << "Can't find a usable output format from image processor"; 2123 LOGF(ERROR) << "Can't find a usable output format from image processor";
2002 return false; 2124 return false;
2003 } 2125 }
2004 egl_image_device_ = image_processor_device_; 2126 egl_image_device_ = image_processor_device_;
2005 } else { 2127 } else {
2128 if (output_mode_ == Config::OutputMode::IMPORT) {
2129 LOGF(ERROR) << "Import mode is unsupported without image processor.";
2130 return false;
2131 }
2006 egl_image_format_fourcc_ = output_format_fourcc_; 2132 egl_image_format_fourcc_ = output_format_fourcc_;
2007 egl_image_device_ = device_; 2133 egl_image_device_ = device_;
2008 } 2134 }
2009 DVLOGF(2) << "Output format=" << output_format_fourcc_; 2135 DVLOGF(2) << "Output format=" << output_format_fourcc_;
2010 2136
2011 // Just set the fourcc for output; resolution, etc., will come from the 2137 // Just set the fourcc for output; resolution, etc., will come from the
2012 // driver once it extracts it from the stream. 2138 // driver once it extracts it from the stream.
2013 memset(&format, 0, sizeof(format)); 2139 memset(&format, 0, sizeof(format));
2014 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2140 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2015 format.fmt.pix_mp.pixelformat = output_format_fourcc_; 2141 format.fmt.pix_mp.pixelformat = output_format_fourcc_;
(...skipping 46 matching lines...)
2062 for (uint32_t processor_output_format : processor_output_formats) { 2188 for (uint32_t processor_output_format : processor_output_formats) {
2063 if (device_->CanCreateEGLImageFrom(processor_output_format)) { 2189 if (device_->CanCreateEGLImageFrom(processor_output_format)) {
2064 DVLOGF(1) << "Image processor output format=" << processor_output_format; 2190 DVLOGF(1) << "Image processor output format=" << processor_output_format;
2065 return processor_output_format; 2191 return processor_output_format;
2066 } 2192 }
2067 } 2193 }
2068 2194
2069 return 0; 2195 return 0;
2070 } 2196 }
2071 2197
2198 bool V4L2VideoDecodeAccelerator::CreateImageProcessor() {
2199 DVLOGF(3);
2200 DCHECK(!image_processor_);
2201 image_processor_.reset(new V4L2ImageProcessor(image_processor_device_));
2202 v4l2_memory output_memory_type =
2203 (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP
2204 : V4L2_MEMORY_DMABUF);
2205 // Unretained is safe because |this| owns image processor and there will be
2206 // no callbacks after processor destroys.
2207 if (!image_processor_->Initialize(
2208 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
2209 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_),
2210 V4L2_MEMORY_DMABUF, output_memory_type, visible_size_, coded_size_,
2211 visible_size_, egl_image_size_, output_buffer_map_.size(),
2212 base::Bind(&V4L2VideoDecodeAccelerator::ImageProcessorError,
2213 base::Unretained(this)))) {
2214 LOGF(ERROR) << "Initialize image processor failed";
2215 NOTIFY_ERROR(PLATFORM_FAILURE);
2216 return false;
2217 }
2218 DCHECK(image_processor_->output_allocated_size() == egl_image_size_);
2219 DVLOGF(3) << "image_processor_->output_allocated_size()="
2220 << image_processor_->output_allocated_size().ToString();
2221 if (image_processor_->input_allocated_size() != coded_size_) {
2222 LOGF(ERROR) << "Image processor should be able to take the output coded "
2223 << "size of decoder " << coded_size_.ToString()
2224 << " without adjusting to "
2225 << image_processor_->input_allocated_size().ToString();
2226 NOTIFY_ERROR(PLATFORM_FAILURE);
2227 return false;
2228 }
2229 return true;
2230 }
2231
2232 bool V4L2VideoDecodeAccelerator::ProcessFrame(int32_t bitstream_buffer_id,
2233 int output_buffer_index) {
2234 DVLOGF(3);
2235 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2236
2237 OutputRecord& output_record = output_buffer_map_[output_buffer_index];
2238 output_record.state = kAtProcessor;
2239 image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id);
2240 std::vector<int> processor_input_fds;
2241 for (auto& fd : output_record.processor_input_fds) {
2242 processor_input_fds.push_back(fd.get());
2243 }
2244 scoped_refptr<VideoFrame> input_frame = VideoFrame::WrapExternalDmabufs(
2245 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
2246 coded_size_, gfx::Rect(visible_size_), visible_size_, processor_input_fds,
2247 base::TimeDelta());
2248
2249 std::vector<base::ScopedFD> processor_output_fds;
2250 if (output_mode_ == Config::OutputMode::IMPORT) {
2251 for (auto& fd : output_record.processor_output_fds) {
2252 processor_output_fds.push_back(
2253 base::ScopedFD(HANDLE_EINTR(dup(fd.get()))));
2254 if (!processor_output_fds.back().is_valid()) {
2255 PLOGF(ERROR) << "Failed duplicating a dmabuf fd";
2256 return false;
2257 }
2258 }
2259 }
2260 // Unretained is safe because |this| owns image processor and there will
2261 // be no callbacks after processor destroys. Also, this class ensures it
2262 // is safe to post a task from child thread to decoder thread using
2263 // Unretained.
2264 image_processor_->Process(
2265 input_frame, output_buffer_index, std::move(processor_output_fds),
2266 BindToCurrentLoop(base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed,
2267 base::Unretained(this),
2268 bitstream_buffer_id)));
2269 return true;
2270 }
2271
2072 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() { 2272 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
2073 DVLOGF(3); 2273 DVLOGF(3);
2074 DCHECK(decoder_state_ == kInitialized || 2274 DCHECK(decoder_state_ == kInitialized ||
2075 decoder_state_ == kChangingResolution); 2275 decoder_state_ == kChangingResolution);
2076 DCHECK(!output_streamon_); 2276 DCHECK(!output_streamon_);
2077 DCHECK(output_buffer_map_.empty()); 2277 DCHECK(output_buffer_map_.empty());
2078 2278
2079 // Number of output buffers we need. 2279 // Number of output buffers we need.
2080 struct v4l2_control ctrl; 2280 struct v4l2_control ctrl;
2081 memset(&ctrl, 0, sizeof(ctrl)); 2281 memset(&ctrl, 0, sizeof(ctrl));
2082 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE; 2282 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
2083 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl); 2283 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
2084 output_dpb_size_ = ctrl.value; 2284 output_dpb_size_ = ctrl.value;
2085 2285
2086 // Output format setup in Initialize(). 2286 // Output format setup in Initialize().
2087 2287
2088 const uint32_t buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount; 2288 const uint32_t buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount;
2089 DVLOGF(3) << "buffer_count=" << buffer_count 2289 DVLOGF(3) << "buffer_count=" << buffer_count
2090 << ", coded_size=" << egl_image_size_.ToString(); 2290 << ", coded_size=" << egl_image_size_.ToString();
2091 2291
2292 // With ALLOCATE mode the client can sample it as RGB and doesn't need to
2293 // know the precise format.
2294 VideoPixelFormat pixel_format =
2295 (output_mode_ == Config::OutputMode::IMPORT)
2296 ? V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_)
2297 : PIXEL_FORMAT_UNKNOWN;
2298
2092 child_task_runner_->PostTask( 2299 child_task_runner_->PostTask(
2093 FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_, 2300 FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_,
2094 buffer_count, PIXEL_FORMAT_UNKNOWN, 1, 2301 buffer_count, pixel_format, 1, egl_image_size_,
2095 egl_image_size_, device_->GetTextureTarget())); 2302 device_->GetTextureTarget()));
2096 2303
2097 // Go into kAwaitingPictureBuffers to prevent us from doing any more decoding 2304 // Go into kAwaitingPictureBuffers to prevent us from doing any more decoding
2098 // or event handling while we are waiting for AssignPictureBuffers(). Not 2305 // or event handling while we are waiting for AssignPictureBuffers(). Not
2099 // having Pictures available would not have prevented us from making decoding 2306 // having Pictures available would not have prevented us from making decoding
2100 // progress entirely e.g. in the case of H.264 where we could further decode 2307 // progress entirely e.g. in the case of H.264 where we could further decode
2101 // non-slice NALUs and could even get another resolution change before we were 2308 // non-slice NALUs and could even get another resolution change before we were
2102 // done with this one. After we get the buffers, we'll go back into kIdle and 2309 // done with this one. After we get the buffers, we'll go back into kIdle and
2103 // kick off further event processing, and eventually go back into kDecoding 2310 // kick off further event processing, and eventually go back into kDecoding
2104 // once no more events are pending (if any). 2311 // once no more events are pending (if any).
2105 decoder_state_ = kAwaitingPictureBuffers; 2312 decoder_state_ = kAwaitingPictureBuffers;
(...skipping 18 matching lines...)
2124 reqbufs.count = 0; 2331 reqbufs.count = 0;
2125 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 2332 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2126 reqbufs.memory = V4L2_MEMORY_MMAP; 2333 reqbufs.memory = V4L2_MEMORY_MMAP;
2127 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); 2334 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
2128 2335
2129 input_buffer_map_.clear(); 2336 input_buffer_map_.clear();
2130 free_input_buffers_.clear(); 2337 free_input_buffers_.clear();
2131 } 2338 }
2132 2339
2133 bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() { 2340 bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
2341 struct v4l2_requestbuffers reqbufs;
2342 memset(&reqbufs, 0, sizeof(reqbufs));
2343 reqbufs.count = 0;
2344 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2345 reqbufs.memory = V4L2_MEMORY_MMAP;
2346 if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
2347 PLOGF(ERROR) << "ioctl() failed: VIDIOC_REQBUFS";
2348 NOTIFY_ERROR(PLATFORM_FAILURE);
2349 return false;
2350 }
2351
2352 output_buffer_map_.clear();
2353 while (!free_output_buffers_.empty())
2354 free_output_buffers_.pop_front();
2355 output_buffer_queued_count_ = 0;
2356 // The client may still hold some buffers. The texture holds a reference to
2357 // the buffer. It is OK to free the buffer and destroy EGLImage here.
2358 decoder_frames_at_client_ = 0;
2359 return true;
2360 }
2361
2362 bool V4L2VideoDecodeAccelerator::DestroyEGLImages() {
  DVLOGF(3);
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);
  bool success = true;

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];

    if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
      if (egl_image_device_->DestroyEGLImage(
              egl_display_, output_record.egl_image) != EGL_TRUE) {
        DVLOGF(1) << "DestroyEGLImage failed.";
        success = false;
      }
    }

    if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
      if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
        DVLOGF(1) << "eglDestroySyncKHR failed.";
        success = false;
      }
    }

    DVLOGF(1) << "dismissing PictureBuffer id=" << output_record.picture_id;
    child_task_runner_->PostTask(
        FROM_HERE, base::Bind(&Client::DismissPictureBuffer, client_,
                              output_record.picture_id));
  }

-  struct v4l2_requestbuffers reqbufs;
-  memset(&reqbufs, 0, sizeof(reqbufs));
-  reqbufs.count = 0;
-  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
-  reqbufs.memory = V4L2_MEMORY_MMAP;
-  if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
-    PLOGF(ERROR) << "ioctl() failed: VIDIOC_REQBUFS";
-    success = false;
-  }
-
-  output_buffer_map_.clear();
-  while (!free_output_buffers_.empty())
-    free_output_buffers_.pop();
-  output_buffer_queued_count_ = 0;
-  // The client may still hold some buffers. The texture holds a reference to
-  // the buffer. It is OK to free the buffer and destroy EGLImage here.
-  decoder_frames_at_client_ = 0;
-
  return success;
}

void V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers() {
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DVLOGF(3);

-  if (!DestroyOutputBuffers()) {
-    LOGF(ERROR) << "Failed destroying output buffers.";
+  if (!DestroyEGLImages()) {
+    LOGF(ERROR) << "Failed destroying EGL images.";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  // Finish resolution change on decoder thread.
  decoder_thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::FinishResolutionChange,
                            base::Unretained(this)));
}
(...skipping 54 matching lines...)
            << ", bitstream_buffer_id=" << bitstream_buffer_id;
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_GE(output_buffer_index, 0);
  DCHECK_LT(output_buffer_index, static_cast<int>(output_buffer_map_.size()));

  OutputRecord& output_record = output_buffer_map_[output_buffer_index];
  DCHECK_EQ(output_record.state, kAtProcessor);
  if (!image_processor_bitstream_buffer_ids_.empty() &&
      image_processor_bitstream_buffer_ids_.front() == bitstream_buffer_id) {
    DVLOGF(3) << "picture_id=" << output_record.picture_id;
-    DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
    DCHECK_NE(output_record.picture_id, -1);
    // Send the processed frame to render.
    output_record.state = kAtClient;
    decoder_frames_at_client_++;
    image_processor_bitstream_buffer_ids_.pop();
    const Picture picture(output_record.picture_id, bitstream_buffer_id,
                          gfx::Rect(visible_size_), false);
    pending_picture_ready_.push(PictureRecord(output_record.cleared, picture));
    SendPictureReady();
    output_record.cleared = true;
    // Flush or resolution change may be waiting for the image processor to
    // finish.
    if (image_processor_bitstream_buffer_ids_.empty()) {
      NotifyFlushDoneIfNeeded();
      if (decoder_state_ == kChangingResolution)
        StartResolutionChange();
    }
  } else {
    DVLOGF(2) << "Bitstream buffer id " << bitstream_buffer_id << " not found "
              << "because of Reset. Drop the buffer";
    output_record.state = kFree;
-    free_output_buffers_.push(output_buffer_index);
+    free_output_buffers_.push_back(output_buffer_index);
    // Do not queue the buffer if a resolution change is in progress. The queue
    // is about to be destroyed anyway. Otherwise, the queue will be started in
    // Enqueue and REQBUFS(0) will fail.
    if (decoder_state_ != kChangingResolution)
      Enqueue();
  }
}
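The else branch above implements a simple deliver-or-recycle rule: a processed frame is delivered to the client only when its bitstream buffer id still matches the front of the expected queue; otherwise (typically after a Reset cleared that queue) the output buffer goes straight back to the free list. A small self-contained sketch of that pattern, with hypothetical names:

// Illustrative sketch of the deliver-or-recycle pattern; not code from this patch.
#include <deque>
#include <iostream>
#include <queue>

struct ProcessedFrameRouter {
  std::queue<int> expected_bitstream_ids;  // Filled when frames are submitted.
  std::deque<int> free_buffers;            // Indices of reusable output buffers.

  void FrameProcessed(int bitstream_id, int buffer_index) {
    if (!expected_bitstream_ids.empty() &&
        expected_bitstream_ids.front() == bitstream_id) {
      expected_bitstream_ids.pop();
      std::cout << "deliver buffer " << buffer_index << " for bitstream "
                << bitstream_id << "\n";
    } else {
      // The id was dropped (e.g. by a Reset); recycle the buffer instead.
      free_buffers.push_back(buffer_index);
    }
  }
};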

void V4L2VideoDecodeAccelerator::ImageProcessorError() {
  LOGF(ERROR) << "Image processor error";
  NOTIFY_ERROR(PLATFORM_FAILURE);
}

}  // namespace media