Chromium Code Reviews

Side by Side Diff: media/gpu/v4l2_video_decode_accelerator.cc

Issue 2191263002: V4L2VideoDecodeAccelerator: support external buffer import (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: V4L2VideoDecodeAccelerator: support external buffer import Created 4 years, 3 months ago
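For context, the IMPORT output mode added by this patch is driven from the client roughly as sketched below. Only AssignPictureBuffers() and the new ImportBufferForPicture() correspond to real VDA entry points touched by this change (and Config::OutputMode::IMPORT must have been requested at Initialize() time); the client class and its helpers (AllocatePixmapBuffer(), ExportHandle()) are hypothetical placeholders, not part of the patch.

// Hypothetical client flow for Config::OutputMode::IMPORT (sketch only).
void MyVdaClient::ProvidePictureBuffers(uint32_t count,
                                        VideoPixelFormat format,
                                        uint32_t textures_per_buffer,
                                        const gfx::Size& size,
                                        uint32_t texture_target) {
  std::vector<media::PictureBuffer> buffers;
  for (uint32_t i = 0; i < count; ++i)
    buffers.push_back(AllocatePixmapBuffer(format, size));  // hypothetical helper
  vda_->AssignPictureBuffers(buffers);
  for (const auto& buffer : buffers) {
    // In IMPORT mode the client owns the backing storage and hands the dmabuf
    // fds to the decoder in a GpuMemoryBufferHandle, instead of the decoder
    // allocating MMAP buffers itself (the ALLOCATE path).
    gfx::GpuMemoryBufferHandle handle = ExportHandle(buffer);  // hypothetical helper
    vda_->ImportBufferForPicture(buffer.id(), handle);
  }
}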
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/gpu/v4l2_video_decode_accelerator.h" 5 #include "media/gpu/v4l2_video_decode_accelerator.h"
6 6
7 #include <dlfcn.h> 7 #include <dlfcn.h>
8 #include <errno.h> 8 #include <errno.h>
9 #include <fcntl.h> 9 #include <fcntl.h>
10 #include <linux/videodev2.h> 10 #include <linux/videodev2.h>
11 #include <poll.h> 11 #include <poll.h>
12 #include <string.h> 12 #include <string.h>
13 #include <sys/eventfd.h> 13 #include <sys/eventfd.h>
14 #include <sys/ioctl.h> 14 #include <sys/ioctl.h>
15 #include <sys/mman.h> 15 #include <sys/mman.h>
16 16
17 #include "base/bind.h" 17 #include "base/bind.h"
18 #include "base/command_line.h" 18 #include "base/command_line.h"
19 #include "base/memory/ptr_util.h"
19 #include "base/message_loop/message_loop.h" 20 #include "base/message_loop/message_loop.h"
20 #include "base/numerics/safe_conversions.h" 21 #include "base/numerics/safe_conversions.h"
21 #include "base/single_thread_task_runner.h" 22 #include "base/single_thread_task_runner.h"
22 #include "base/threading/thread_task_runner_handle.h" 23 #include "base/threading/thread_task_runner_handle.h"
23 #include "base/trace_event/trace_event.h" 24 #include "base/trace_event/trace_event.h"
24 #include "build/build_config.h" 25 #include "build/build_config.h"
25 #include "media/base/bind_to_current_loop.h" 26 #include "media/base/bind_to_current_loop.h"
26 #include "media/base/media_switches.h" 27 #include "media/base/media_switches.h"
27 #include "media/filters/h264_parser.h" 28 #include "media/filters/h264_parser.h"
28 #include "media/gpu/shared_memory_region.h" 29 #include "media/gpu/shared_memory_region.h"
(...skipping 102 matching lines...)
131 V4L2VideoDecodeAccelerator::InputRecord::InputRecord() 132 V4L2VideoDecodeAccelerator::InputRecord::InputRecord()
132 : at_device(false), address(NULL), length(0), bytes_used(0), input_id(-1) {} 133 : at_device(false), address(NULL), length(0), bytes_used(0), input_id(-1) {}
133 134
134 V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {} 135 V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {}
135 136
136 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord() 137 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord()
137 : state(kFree), 138 : state(kFree),
138 egl_image(EGL_NO_IMAGE_KHR), 139 egl_image(EGL_NO_IMAGE_KHR),
139 egl_sync(EGL_NO_SYNC_KHR), 140 egl_sync(EGL_NO_SYNC_KHR),
140 picture_id(-1), 141 picture_id(-1),
142 texture_id(0),
141 cleared(false) {} 143 cleared(false) {}
142 144
143 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {} 145 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {}
144 146
145 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(bool cleared, 147 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(bool cleared,
146 const Picture& picture) 148 const Picture& picture)
147 : cleared(cleared), picture(picture) {} 149 : cleared(cleared), picture(picture) {}
148 150
149 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {} 151 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {}
150 152
151 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator( 153 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator(
152 EGLDisplay egl_display, 154 EGLDisplay egl_display,
153 const GetGLContextCallback& get_gl_context_cb, 155 const GetGLContextCallback& get_gl_context_cb,
154 const MakeGLContextCurrentCallback& make_context_current_cb, 156 const MakeGLContextCurrentCallback& make_context_current_cb,
155 const scoped_refptr<V4L2Device>& device) 157 const scoped_refptr<V4L2Device>& device)
156 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()), 158 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
157 decoder_thread_("V4L2DecoderThread"), 159 decoder_thread_("V4L2DecoderThread"),
158 decoder_state_(kUninitialized), 160 decoder_state_(kUninitialized),
161 output_mode_(Config::OutputMode::ALLOCATE),
159 device_(device), 162 device_(device),
160 decoder_delay_bitstream_buffer_id_(-1), 163 decoder_delay_bitstream_buffer_id_(-1),
161 decoder_current_input_buffer_(-1), 164 decoder_current_input_buffer_(-1),
162 decoder_decode_buffer_tasks_scheduled_(0), 165 decoder_decode_buffer_tasks_scheduled_(0),
163 decoder_frames_at_client_(0), 166 decoder_frames_at_client_(0),
164 decoder_flushing_(false), 167 decoder_flushing_(false),
165 reset_pending_(false), 168 reset_pending_(false),
166 decoder_partial_frame_pending_(false), 169 decoder_partial_frame_pending_(false),
167 input_streamon_(false), 170 input_streamon_(false),
168 input_buffer_queued_count_(0), 171 input_buffer_queued_count_(0),
(...skipping 12 matching lines...)
181 egl_image_planes_count_(0), 184 egl_image_planes_count_(0),
182 weak_this_factory_(this) { 185 weak_this_factory_(this) {
183 weak_this_ = weak_this_factory_.GetWeakPtr(); 186 weak_this_ = weak_this_factory_.GetWeakPtr();
184 } 187 }
185 188
186 V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() { 189 V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() {
187 DCHECK(!decoder_thread_.IsRunning()); 190 DCHECK(!decoder_thread_.IsRunning());
188 DCHECK(!device_poll_thread_.IsRunning()); 191 DCHECK(!device_poll_thread_.IsRunning());
189 192
190 DestroyInputBuffers(); 193 DestroyInputBuffers();
194 DestroyEGLImages();
191 DestroyOutputBuffers(); 195 DestroyOutputBuffers();
192 196
193 // These maps have members that should be manually destroyed, e.g. file 197 // These maps have members that should be manually destroyed, e.g. file
194 // descriptors, mmap() segments, etc. 198 // descriptors, mmap() segments, etc.
195 DCHECK(input_buffer_map_.empty()); 199 DCHECK(input_buffer_map_.empty());
196 DCHECK(output_buffer_map_.empty()); 200 DCHECK(output_buffer_map_.empty());
197 } 201 }
198 202
199 bool V4L2VideoDecodeAccelerator::Initialize(const Config& config, 203 bool V4L2VideoDecodeAccelerator::Initialize(const Config& config,
200 Client* client) { 204 Client* client) {
201 DVLOGF(3) << "profile: " << config.profile; 205 DVLOGF(3) << "profile: " << config.profile;
202 DCHECK(child_task_runner_->BelongsToCurrentThread()); 206 DCHECK(child_task_runner_->BelongsToCurrentThread());
203 DCHECK_EQ(decoder_state_, kUninitialized); 207 DCHECK_EQ(decoder_state_, kUninitialized);
204 208
205 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( 209 if (!device_->SupportsDecodeProfileForV4L2PixelFormats(
206 config.profile, arraysize(supported_input_fourccs_), 210 config.profile, arraysize(supported_input_fourccs_),
207 supported_input_fourccs_)) { 211 supported_input_fourccs_)) {
208 DVLOGF(1) << "unsupported profile=" << config.profile; 212 DVLOGF(1) << "unsupported profile=" << config.profile;
209 return false; 213 return false;
210 } 214 }
211 215
212 if (config.is_encrypted) { 216 if (config.is_encrypted) {
213 NOTREACHED() << "Encrypted streams are not supported for this VDA"; 217 NOTREACHED() << "Encrypted streams are not supported for this VDA";
214 return false; 218 return false;
215 } 219 }
216 220
217 if (config.output_mode != Config::OutputMode::ALLOCATE) { 221 if (config.output_mode != Config::OutputMode::ALLOCATE &&
218 NOTREACHED() << "Only ALLOCATE OutputMode is supported by this VDA"; 222 config.output_mode != Config::OutputMode::IMPORT) {
223 NOTREACHED() << "Only ALLOCATE and IMPORT OutputModes are supported";
219 return false; 224 return false;
220 } 225 }
221 226
222 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) {
223 NOTREACHED() << "GL callbacks are required for this VDA";
224 return false;
225 }
226
227 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); 227 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
228 client_ = client_ptr_factory_->GetWeakPtr(); 228 client_ = client_ptr_factory_->GetWeakPtr();
229 // If we haven't been set up to decode on separate thread via 229 // If we haven't been set up to decode on separate thread via
230 // TryToSetupDecodeOnSeparateThread(), use the main thread/client for 230 // TryToSetupDecodeOnSeparateThread(), use the main thread/client for
231 // decode tasks. 231 // decode tasks.
232 if (!decode_task_runner_) { 232 if (!decode_task_runner_) {
233 decode_task_runner_ = child_task_runner_; 233 decode_task_runner_ = child_task_runner_;
234 DCHECK(!decode_client_); 234 DCHECK(!decode_client_);
235 decode_client_ = client_; 235 decode_client_ = client_;
236 } 236 }
237 237
238 video_profile_ = config.profile; 238 video_profile_ = config.profile;
239 239
240 if (egl_display_ == EGL_NO_DISPLAY) { 240 if (egl_display_ == EGL_NO_DISPLAY) {
241 LOGF(ERROR) << "could not get EGLDisplay"; 241 LOGF(ERROR) << "could not get EGLDisplay";
242 return false; 242 return false;
243 } 243 }
244 244
245 // We need the context to be initialized to query extensions. 245 // We need the context to be initialized to query extensions.
246 if (!make_context_current_cb_.Run()) { 246 if (!make_context_current_cb_.is_null()) {
247 LOGF(ERROR) << "could not make context current"; 247 if (!make_context_current_cb_.Run()) {
248 return false; 248 LOGF(ERROR) << "could not make context current";
249 } 249 return false;
250 }
250 251
251 // TODO(posciak): crbug.com/450898. 252 // TODO(posciak): crbug.com/450898.
252 #if defined(ARCH_CPU_ARMEL) 253 #if defined(ARCH_CPU_ARMEL)
253 if (!gl::g_driver_egl.ext.b_EGL_KHR_fence_sync) { 254 if (!gl::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
254 LOGF(ERROR) << "context does not have EGL_KHR_fence_sync"; 255 LOGF(ERROR) << "context does not have EGL_KHR_fence_sync";
255 return false; 256 return false;
257 }
258 #endif
259 } else {
260 DVLOG(1) << "No GL callbacks provided, initializing without GL support";
256 } 261 }
257 #endif
258 262
259 // Capabilities check. 263 // Capabilities check.
260 struct v4l2_capability caps; 264 struct v4l2_capability caps;
261 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING; 265 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
262 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); 266 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
263 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { 267 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
264 LOGF(ERROR) << "ioctl() failed: VIDIOC_QUERYCAP" 268 LOGF(ERROR) << "ioctl() failed: VIDIOC_QUERYCAP"
265 << ", caps check failed: 0x" << std::hex << caps.capabilities; 269 << ", caps check failed: 0x" << std::hex << caps.capabilities;
266 return false; 270 return false;
267 } 271 }
(...skipping 13 matching lines...)
281 285
282 if (!CreateInputBuffers()) 286 if (!CreateInputBuffers())
283 return false; 287 return false;
284 288
285 if (!decoder_thread_.Start()) { 289 if (!decoder_thread_.Start()) {
286 LOGF(ERROR) << "decoder thread failed to start"; 290 LOGF(ERROR) << "decoder thread failed to start";
287 return false; 291 return false;
288 } 292 }
289 293
290 decoder_state_ = kInitialized; 294 decoder_state_ = kInitialized;
295 output_mode_ = config.output_mode;
291 296
292 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here. 297 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here.
293 decoder_thread_.task_runner()->PostTask( 298 decoder_thread_.task_runner()->PostTask(
294 FROM_HERE, base::Bind(base::IgnoreResult( 299 FROM_HERE, base::Bind(base::IgnoreResult(
295 &V4L2VideoDecodeAccelerator::StartDevicePoll), 300 &V4L2VideoDecodeAccelerator::StartDevicePoll),
296 base::Unretained(this))); 301 base::Unretained(this)));
297 302
298 return true; 303 return true;
299 } 304 }
300 305
(...skipping 51 matching lines...)
352 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 357 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
353 reqbufs.memory = V4L2_MEMORY_MMAP; 358 reqbufs.memory = V4L2_MEMORY_MMAP;
354 IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs); 359 IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs);
355 360
356 if (reqbufs.count != buffers.size()) { 361 if (reqbufs.count != buffers.size()) {
357 DLOGF(ERROR) << "Could not allocate enough output buffers"; 362 DLOGF(ERROR) << "Could not allocate enough output buffers";
358 NOTIFY_ERROR(PLATFORM_FAILURE); 363 NOTIFY_ERROR(PLATFORM_FAILURE);
359 return; 364 return;
360 } 365 }
361 366
362 if (image_processor_device_) {
363 DCHECK(!image_processor_);
364 image_processor_.reset(new V4L2ImageProcessor(image_processor_device_));
365 // Unretained is safe because |this| owns image processor and there will be
366 // no callbacks after processor destroys.
367 if (!image_processor_->Initialize(
368 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
369 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_),
370 V4L2_MEMORY_DMABUF, visible_size_, coded_size_, visible_size_,
371 visible_size_, buffers.size(),
372 base::Bind(&V4L2VideoDecodeAccelerator::ImageProcessorError,
373 base::Unretained(this)))) {
374 LOGF(ERROR) << "Initialize image processor failed";
375 NOTIFY_ERROR(PLATFORM_FAILURE);
376 return;
377 }
378 DCHECK(image_processor_->output_allocated_size() == egl_image_size_);
379 if (image_processor_->input_allocated_size() != coded_size_) {
380 LOGF(ERROR) << "Image processor should be able to take the output coded "
381 << "size of decoder " << coded_size_.ToString()
382 << " without adjusting to "
383 << image_processor_->input_allocated_size().ToString();
384 NOTIFY_ERROR(PLATFORM_FAILURE);
385 return;
386 }
387 }
388
389 child_task_runner_->PostTask(
390 FROM_HERE,
391 base::Bind(&V4L2VideoDecodeAccelerator::CreateEGLImages, weak_this_,
392 buffers, egl_image_format_fourcc_, egl_image_planes_count_));
393 }
394
395 void V4L2VideoDecodeAccelerator::CreateEGLImages(
396 const std::vector<media::PictureBuffer>& buffers,
397 uint32_t output_format_fourcc,
398 size_t output_planes_count) {
399 DVLOGF(3);
400 DCHECK(child_task_runner_->BelongsToCurrentThread());
401
402 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) {
403 DLOGF(ERROR) << "GL callbacks required for binding to EGLImages";
404 NOTIFY_ERROR(INVALID_ARGUMENT);
405 return;
406 }
407
408 gl::GLContext* gl_context = get_gl_context_cb_.Run();
409 if (!gl_context || !make_context_current_cb_.Run()) {
410 DLOGF(ERROR) << "No GL context";
411 NOTIFY_ERROR(PLATFORM_FAILURE);
412 return;
413 }
414
415 gl::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);
416
417 std::vector<EGLImageKHR> egl_images;
418 for (size_t i = 0; i < buffers.size(); ++i) {
419 std::vector<base::ScopedFD> dmabuf_fds;
420 dmabuf_fds = egl_image_device_->GetDmabufsForV4L2Buffer(
421 i, egl_image_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
422 if (dmabuf_fds.empty()) {
423 LOGF(ERROR) << "Failed to get DMABUFs for EGLImage.";
424 NOTIFY_ERROR(PLATFORM_FAILURE);
425 return;
426 }
427
428 EGLImageKHR egl_image = egl_image_device_->CreateEGLImage(
429 egl_display_, gl_context->GetHandle(), buffers[i].texture_ids()[0],
430 buffers[i].size(), i, egl_image_format_fourcc_, dmabuf_fds);
431 if (egl_image == EGL_NO_IMAGE_KHR) {
432 LOGF(ERROR) << "could not create EGLImageKHR,"
433 << " index=" << i
434 << " texture_id=" << buffers[i].texture_ids()[0];
435 for (EGLImageKHR image : egl_images) {
436 if (egl_image_device_->DestroyEGLImage(egl_display_, image) != EGL_TRUE)
437 DVLOGF(1) << "DestroyEGLImage failed.";
438 }
439 NOTIFY_ERROR(PLATFORM_FAILURE);
440 return;
441 }
442 egl_images.push_back(egl_image);
443 }
444
445 decoder_thread_.task_runner()->PostTask(
446 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::AssignEGLImages,
447 base::Unretained(this), buffers, egl_images));
448 }
449
450 void V4L2VideoDecodeAccelerator::AssignEGLImages(
451 const std::vector<media::PictureBuffer>& buffers,
452 const std::vector<EGLImageKHR>& egl_images) {
453 DVLOGF(3);
454 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
455 DCHECK_EQ(buffers.size(), egl_images.size());
456 DCHECK(free_output_buffers_.empty()); 367 DCHECK(free_output_buffers_.empty());
457 DCHECK(output_buffer_map_.empty()); 368 DCHECK(output_buffer_map_.empty());
458
459 output_buffer_map_.resize(buffers.size()); 369 output_buffer_map_.resize(buffers.size());
370 if (image_processor_device_ && output_mode_ == Config::OutputMode::ALLOCATE) {
371 CreateImageProcessor();
kcwu 2016/09/07 03:46:23 return if CreateImageProcessor failed.
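A minimal sketch of the change this comment asks for, assuming CreateImageProcessor() is made to return bool (in the current patch set it returns void and reports failure only via NOTIFY_ERROR), so callers can stop instead of continuing with a half-initialized processor:

bool V4L2VideoDecodeAccelerator::CreateImageProcessor() {
  image_processor_.reset(new V4L2ImageProcessor(image_processor_device_));
  // ... existing Initialize() call and size checks from this patch ...
  if (!image_processor_->Initialize(/* arguments as in this patch */)) {
    LOGF(ERROR) << "Initialize image processor failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;  // Let the caller bail out.
  }
  if (image_processor_->input_allocated_size() != coded_size_) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  return true;
}

// Call site here in AssignPictureBuffers():
if (image_processor_device_ && output_mode_ == Config::OutputMode::ALLOCATE) {
  if (!CreateImageProcessor())
    return;
}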
372 }
373
460 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 374 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
461 DCHECK(buffers[i].size() == egl_image_size_); 375 DCHECK(buffers[i].size() == egl_image_size_);
376 DCHECK_EQ(1u, buffers[i].texture_ids().size());
462 377
463 OutputRecord& output_record = output_buffer_map_[i]; 378 OutputRecord& output_record = output_buffer_map_[i];
464 DCHECK_EQ(output_record.state, kFree); 379 DCHECK_EQ(output_record.state, kFree);
465 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR); 380 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
466 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); 381 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
467 DCHECK_EQ(output_record.picture_id, -1); 382 DCHECK_EQ(output_record.picture_id, -1);
383 DCHECK(output_record.processor_input_fds.empty());
468 DCHECK_EQ(output_record.cleared, false); 384 DCHECK_EQ(output_record.cleared, false);
469 DCHECK_LE(1u, buffers[i].texture_ids().size()); 385
386 output_record.picture_id = buffers[i].id();
387 output_record.texture_id = buffers[i].texture_ids()[0];
388 // This will remain kAtClient until ImportBufferForPicture is called, either
389 // by the client, or by ourselves, if we are allocating.
390 output_record.state = kAtClient;
470 391
471 if (image_processor_device_) { 392 if (image_processor_device_) {
472 std::vector<base::ScopedFD> fds = device_->GetDmabufsForV4L2Buffer( 393 std::vector<base::ScopedFD> dmabuf_fds = device_->GetDmabufsForV4L2Buffer(
473 i, output_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); 394 i, output_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
474 if (fds.empty()) { 395 if (dmabuf_fds.empty()) {
475 LOGF(ERROR) << "Failed to get DMABUFs of decoder."; 396 LOGF(ERROR) << "Failed to get DMABUFs of decoder.";
476 NOTIFY_ERROR(PLATFORM_FAILURE); 397 NOTIFY_ERROR(PLATFORM_FAILURE);
477 return; 398 return;
478 } 399 }
479 output_record.fds = std::move(fds); 400 output_record.processor_input_fds = std::move(dmabuf_fds);
480 } 401 }
481 402
482 output_record.egl_image = egl_images[i]; 403 if (output_mode_ == Config::OutputMode::ALLOCATE) {
483 output_record.picture_id = buffers[i].id(); 404 std::vector<base::ScopedFD> dmabuf_fds;
484 405 dmabuf_fds = egl_image_device_->GetDmabufsForV4L2Buffer(
485 free_output_buffers_.push(i); 406 i, egl_image_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
486 DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id; 407 if (dmabuf_fds.empty()) {
487 } 408 LOGF(ERROR) << "Failed to get DMABUFs for EGLImage.";
488 409 NOTIFY_ERROR(PLATFORM_FAILURE);
489 decoder_state_ = kDecoding; 410 return;
490 Enqueue(); 411 }
412 auto passed_dmabuf_fds(base::WrapUnique(
413 new std::vector<base::ScopedFD>(std::move(dmabuf_fds))));
414 ImportBufferForPictureTask(output_record.picture_id,
415 std::move(passed_dmabuf_fds),
416 egl_image_size_.width());
417 } // else we'll get triggered via ImportBufferForPicture() from client.
418
419 DVLOGF(3) << "AssignPictureBuffers(): buffer[" << i
420 << "]: picture_id=" << output_record.picture_id;
421 }
422 }
423
424 void V4L2VideoDecodeAccelerator::CreateEGLImageFor(
425 size_t buffer_index,
426 int32_t picture_buffer_id,
427 std::unique_ptr<std::vector<base::ScopedFD>> passed_dmabuf_fds,
428 GLuint texture_id,
429 const gfx::Size& size,
430 uint32_t fourcc) {
431 DVLOGF(3) << "index=" << buffer_index;
432 DCHECK(child_task_runner_->BelongsToCurrentThread());
433
434 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) {
435 DLOG(ERROR) << "GL callbacks required for binding to EGLImages";
436 NOTIFY_ERROR(INVALID_ARGUMENT);
437 return;
438 }
439
440 gl::GLContext* gl_context = get_gl_context_cb_.Run();
441 if (!gl_context || !make_context_current_cb_.Run()) {
442 DLOG(ERROR) << "No GL context";
443 NOTIFY_ERROR(PLATFORM_FAILURE);
444 return;
445 }
446
447 gl::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);
448
449 EGLImageKHR egl_image = egl_image_device_->CreateEGLImage(
450 egl_display_, gl_context->GetHandle(), texture_id, size, buffer_index,
451 fourcc, *passed_dmabuf_fds);
452 if (egl_image == EGL_NO_IMAGE_KHR) {
453 LOGF(ERROR) << "could not create EGLImageKHR,"
454 << " index=" << buffer_index << " texture_id=" << texture_id;
455 NOTIFY_ERROR(PLATFORM_FAILURE);
456 return;
457 }
458
459 decoder_thread_.task_runner()->PostTask(
460 FROM_HERE,
461 base::Bind(&V4L2VideoDecodeAccelerator::AssignEGLImage,
462 base::Unretained(this), buffer_index, picture_buffer_id,
463 egl_image, base::Passed(&passed_dmabuf_fds)));
464 }
465
466 void V4L2VideoDecodeAccelerator::AssignEGLImage(
467 size_t buffer_index,
468 int32_t picture_buffer_id,
469 EGLImageKHR egl_image,
470 std::unique_ptr<std::vector<base::ScopedFD>> passed_dmabuf_fds) {
471 DVLOGF(3) << "index=" << buffer_index;
472 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
473
474 // It's possible that while waiting for the EGLImages to be allocated and
475 // assigned, we have already decoded more of the stream and seen another
476 // resolution change. This is a normal situation; in such a case either there
477 // is no output record with this index awaiting an EGLImage to be assigned to
478 // it, or the record is already updated to use a newer PictureBuffer and is
479 // awaiting an EGLImage associated with a different picture_buffer_id. If so,
480 // just discard this image, we will get the one we are waiting for later.
481 if (buffer_index >= output_buffer_map_.size() ||
482 output_buffer_map_[buffer_index].picture_id != picture_buffer_id) {
483 DVLOGF(3) << "Picture set already changed, dropping EGLImage";
484 child_task_runner_->PostTask(
485 FROM_HERE, base::Bind(base::IgnoreResult(&V4L2Device::DestroyEGLImage),
486 device_, egl_display_, egl_image));
487 return;
488 }
489
490 OutputRecord& output_record = output_buffer_map_[buffer_index];
491 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
492 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
493 DCHECK_EQ(output_record.state, kFree);
494 DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
495 buffer_index),
496 0);
497 output_record.egl_image = egl_image;
498 free_output_buffers_.push_back(buffer_index);
499 if (decoder_state_ == kAwaitingPictureBuffers) {
500 DVLOG(1) << "Change state to kDecoding";
501 decoder_state_ = kDecoding;
502 }
491 if (reset_pending_) { 503 if (reset_pending_) {
492 FinishReset(); 504 FinishReset();
493 return; 505 return;
494 } 506 }
495 507 if (decoder_state_ != kChangingResolution) {
496 ScheduleDecodeBufferTaskIfNeeded(); 508 Enqueue();
509 ScheduleDecodeBufferTaskIfNeeded();
510 }
511 }
512
513 void V4L2VideoDecodeAccelerator::ImportBufferForPicture(
514 int32_t picture_buffer_id,
515 const gfx::GpuMemoryBufferHandle& gpu_memory_buffer_handle) {
516 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
517 DCHECK(child_task_runner_->BelongsToCurrentThread());
518
519 auto passed_dmabuf_fds(base::WrapUnique(new std::vector<base::ScopedFD>()));
520 int32_t stride = egl_image_size_.width();
521 #if defined(USE_OZONE)
522 for (const auto& fd : gpu_memory_buffer_handle.native_pixmap_handle.fds) {
523 DCHECK_NE(fd.fd, -1);
524 passed_dmabuf_fds->push_back(base::ScopedFD(fd.fd));
525 }
526 stride = gpu_memory_buffer_handle.native_pixmap_handle.planes[0].stride;
527 for (const auto& plane :
528 gpu_memory_buffer_handle.native_pixmap_handle.planes) {
529 DVLOGF(3) << ": offset=" << plane.offset << ", stride=" << plane.stride;
530 }
531 #endif
532
533 if (output_mode_ != Config::OutputMode::IMPORT) {
534 LOGF(ERROR) << "Cannot import in non-import mode";
535 NOTIFY_ERROR(INVALID_ARGUMENT);
536 return;
537 }
538
539 decoder_thread_.message_loop()->PostTask(
540 FROM_HERE,
541 base::Bind(&V4L2VideoDecodeAccelerator::ImportBufferForPictureTask,
542 base::Unretained(this), picture_buffer_id,
543 base::Passed(&passed_dmabuf_fds), stride));
544 }
545
546 void V4L2VideoDecodeAccelerator::ImportBufferForPictureTask(
547 int32_t picture_buffer_id,
548 std::unique_ptr<std::vector<base::ScopedFD>> passed_dmabuf_fds,
549 int32_t stride) {
550 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
551 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
552
553 if (image_processor_device_ && !image_processor_) {
554 DVLOGF(3) << "Original egl_image_size=" << egl_image_size_.ToString()
555 << ", width is adjusted to=" << stride;
556 egl_image_size_.set_width(stride);
557 CreateImageProcessor();
kcwu 2016/09/07 03:46:23 return if CreateImageProcessor failed.
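With the same bool-returning CreateImageProcessor() sketched at the earlier comment, the guard here in ImportBufferForPictureTask() would become, roughly:

if (image_processor_device_ && !image_processor_) {
  egl_image_size_.set_width(stride);
  if (!CreateImageProcessor())
    return;  // Initialize() already reported the error via NOTIFY_ERROR.
}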
558 }
559
560 const auto iter =
561 std::find_if(output_buffer_map_.begin(), output_buffer_map_.end(),
562 [picture_buffer_id](const OutputRecord& output_record) {
563 return output_record.picture_id == picture_buffer_id;
564 });
565 if (iter == output_buffer_map_.end()) {
566 // It's possible that we've already posted a DismissPictureBuffer for this
567 // picture, but it has not yet executed when this ImportBufferForPicture was
568 // posted to us by the client. In that case just ignore this (we've already
569 // dismissed it and accounted for that).
570 DVLOGF(3) << "got picture id=" << picture_buffer_id
571 << " not in use (anymore?).";
572 return;
573 }
574
575 if (iter->state != kAtClient) {
576 LOGF(ERROR) << "Cannot import buffer that not owned by client";
577 NOTIFY_ERROR(INVALID_ARGUMENT);
578 return;
579 }
580
581 size_t index = iter - output_buffer_map_.begin();
582 DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
583 index),
584 0);
585
586 iter->state = kFree;
587 if (iter->texture_id != 0) {
588 if (iter->egl_image != EGL_NO_IMAGE_KHR) {
589 child_task_runner_->PostTask(
590 FROM_HERE,
591 base::Bind(base::IgnoreResult(&V4L2Device::DestroyEGLImage), device_,
592 egl_display_, iter->egl_image));
593 }
594
595 child_task_runner_->PostTask(
596 FROM_HERE,
597 base::Bind(&V4L2VideoDecodeAccelerator::CreateEGLImageFor, weak_this_,
598 index, picture_buffer_id, base::Passed(&passed_dmabuf_fds),
599 iter->texture_id, egl_image_size_,
600 egl_image_format_fourcc_));
601 } else {
602 // No need for an EGLImage, start using this buffer now.
603 DVLOGF(2) << "egl_image_planes_count_=" << egl_image_planes_count_
604 << ", passed_dmabuf_fds->size()=" << passed_dmabuf_fds->size();
605 DCHECK_EQ(egl_image_planes_count_, passed_dmabuf_fds->size());
606 iter->processor_output_fds.swap(*passed_dmabuf_fds);
607 free_output_buffers_.push_back(index);
608 if (decoder_state_ == kAwaitingPictureBuffers) {
609 DVLOG(1) << "Change state to kDecoding";
610 decoder_state_ = kDecoding;
611 }
612 if (decoder_state_ != kChangingResolution) {
613 Enqueue();
614 ScheduleDecodeBufferTaskIfNeeded();
615 }
616 }
497 } 617 }
498 618
499 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) { 619 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) {
500 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; 620 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
501 // Must be run on child thread, as we'll insert a sync in the EGL context. 621 // Must be run on child thread, as we'll insert a sync in the EGL context.
502 DCHECK(child_task_runner_->BelongsToCurrentThread()); 622 DCHECK(child_task_runner_->BelongsToCurrentThread());
503 623
504 if (!make_context_current_cb_.Run()) { 624 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref;
505 LOGF(ERROR) << "could not make context current";
506 NOTIFY_ERROR(PLATFORM_FAILURE);
507 return;
508 }
509 625
510 EGLSyncKHR egl_sync = EGL_NO_SYNC_KHR; 626 if (!make_context_current_cb_.is_null()) {
627 if (!make_context_current_cb_.Run()) {
628 LOGF(ERROR) << "could not make context current";
629 NOTIFY_ERROR(PLATFORM_FAILURE);
630 return;
631 }
632
633 EGLSyncKHR egl_sync = EGL_NO_SYNC_KHR;
511 // TODO(posciak): crbug.com/450898. 634 // TODO(posciak): crbug.com/450898.
512 #if defined(ARCH_CPU_ARMEL) 635 #if defined(ARCH_CPU_ARMEL)
513 egl_sync = eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); 636 egl_sync = eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
514 if (egl_sync == EGL_NO_SYNC_KHR) { 637 if (egl_sync == EGL_NO_SYNC_KHR) {
515 LOGF(ERROR) << "eglCreateSyncKHR() failed"; 638 LOGF(ERROR) << "eglCreateSyncKHR() failed";
516 NOTIFY_ERROR(PLATFORM_FAILURE); 639 NOTIFY_ERROR(PLATFORM_FAILURE);
517 return; 640 return;
518 } 641 }
519 #endif 642 #endif
520 643
521 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref( 644 egl_sync_ref.reset(new EGLSyncKHRRef(egl_display_, egl_sync));
522 new EGLSyncKHRRef(egl_display_, egl_sync)); 645 }
523 646
524 decoder_thread_.task_runner()->PostTask( 647 decoder_thread_.task_runner()->PostTask(
525 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ReusePictureBufferTask, 648 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ReusePictureBufferTask,
526 base::Unretained(this), picture_buffer_id, 649 base::Unretained(this), picture_buffer_id,
527 base::Passed(&egl_sync_ref))); 650 base::Passed(&egl_sync_ref)));
528 } 651 }
529 652
530 void V4L2VideoDecodeAccelerator::Flush() { 653 void V4L2VideoDecodeAccelerator::Flush() {
531 DVLOGF(3); 654 DVLOGF(3);
532 DCHECK(child_task_runner_->BelongsToCurrentThread()); 655 DCHECK(child_task_runner_->BelongsToCurrentThread());
(...skipping 663 matching lines...)
1196 if (errno == EAGAIN) { 1319 if (errno == EAGAIN) {
1197 // EAGAIN if we're just out of buffers to dequeue. 1320 // EAGAIN if we're just out of buffers to dequeue.
1198 break; 1321 break;
1199 } 1322 }
1200 PLOGF(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; 1323 PLOGF(ERROR) << "ioctl() failed: VIDIOC_DQBUF";
1201 NOTIFY_ERROR(PLATFORM_FAILURE); 1324 NOTIFY_ERROR(PLATFORM_FAILURE);
1202 return; 1325 return;
1203 } 1326 }
1204 OutputRecord& output_record = output_buffer_map_[dqbuf.index]; 1327 OutputRecord& output_record = output_buffer_map_[dqbuf.index];
1205 DCHECK_EQ(output_record.state, kAtDevice); 1328 DCHECK_EQ(output_record.state, kAtDevice);
1206 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR); 1329 // DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
kcwu 2016/09/07 03:46:23 remove
1207 DCHECK_NE(output_record.picture_id, -1); 1330 DCHECK_NE(output_record.picture_id, -1);
1208 output_buffer_queued_count_--; 1331 output_buffer_queued_count_--;
1209 if (dqbuf.m.planes[0].bytesused == 0) { 1332 if (dqbuf.m.planes[0].bytesused == 0) {
1210 // This is an empty output buffer returned as part of a flush. 1333 // This is an empty output buffer returned as part of a flush.
1211 output_record.state = kFree; 1334 output_record.state = kFree;
1212 free_output_buffers_.push(dqbuf.index); 1335 free_output_buffers_.push_back(dqbuf.index);
1213 } else { 1336 } else {
1214 int32_t bitstream_buffer_id = dqbuf.timestamp.tv_sec; 1337 int32_t bitstream_buffer_id = dqbuf.timestamp.tv_sec;
1215 DCHECK_GE(bitstream_buffer_id, 0); 1338 DCHECK_GE(bitstream_buffer_id, 0);
1216 DVLOGF(3) << "Dequeue output buffer: dqbuf index=" << dqbuf.index 1339 DVLOGF(3) << "Dequeue output buffer: dqbuf index=" << dqbuf.index
1217 << " bitstream input_id=" << bitstream_buffer_id; 1340 << " bitstream input_id=" << bitstream_buffer_id;
1218 if (image_processor_device_) { 1341 if (image_processor_device_) {
1219 output_record.state = kAtProcessor; 1342 output_record.state = kAtProcessor;
1220 image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id); 1343 image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id);
1221 std::vector<int> fds; 1344 std::vector<int> processor_input_fds;
1222 for (auto& fd : output_record.fds) { 1345 for (auto& fd : output_record.processor_input_fds) {
1223 fds.push_back(fd.get()); 1346 processor_input_fds.push_back(fd.get());
1347 }
1348 std::vector<int> processor_output_fds;
1349 for (auto& fd : output_record.processor_output_fds) {
1350 processor_output_fds.push_back(fd.get());
1224 } 1351 }
1225 scoped_refptr<VideoFrame> frame = VideoFrame::WrapExternalDmabufs( 1352 scoped_refptr<VideoFrame> frame = VideoFrame::WrapExternalDmabufs(
1226 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_), 1353 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
1227 coded_size_, gfx::Rect(visible_size_), visible_size_, fds, 1354 coded_size_, gfx::Rect(visible_size_), visible_size_,
1228 base::TimeDelta()); 1355 processor_input_fds, base::TimeDelta());
1229 // Unretained is safe because |this| owns image processor and there will 1356 // Unretained is safe because |this| owns image processor and there will
1230 // be no callbacks after processor destroys. Also, this class ensures it 1357 // be no callbacks after processor destroys. Also, this class ensures it
1231 // is safe to post a task from child thread to decoder thread using 1358 // is safe to post a task from child thread to decoder thread using
1232 // Unretained. 1359 // Unretained.
1233 image_processor_->Process( 1360 image_processor_->Process(
1234 frame, dqbuf.index, 1361 frame, dqbuf.index, processor_output_fds,
1235 BindToCurrentLoop( 1362 BindToCurrentLoop(
1236 base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed, 1363 base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed,
1237 base::Unretained(this), bitstream_buffer_id))); 1364 base::Unretained(this), bitstream_buffer_id)));
1238 } else { 1365 } else {
1239 output_record.state = kAtClient; 1366 output_record.state = kAtClient;
1240 decoder_frames_at_client_++; 1367 decoder_frames_at_client_++;
1241 const Picture picture(output_record.picture_id, bitstream_buffer_id, 1368 const Picture picture(output_record.picture_id, bitstream_buffer_id,
1242 gfx::Rect(visible_size_), false); 1369 gfx::Rect(visible_size_), false);
1243 pending_picture_ready_.push( 1370 pending_picture_ready_.push(
1244 PictureRecord(output_record.cleared, picture)); 1371 PictureRecord(output_record.cleared, picture));
(...skipping 35 matching lines...)
1280 } 1407 }
1281 1408
1282 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() { 1409 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
1283 DCHECK(!free_output_buffers_.empty()); 1410 DCHECK(!free_output_buffers_.empty());
1284 1411
1285 // Enqueue an output (VIDEO_CAPTURE) buffer. 1412 // Enqueue an output (VIDEO_CAPTURE) buffer.
1286 const int buffer = free_output_buffers_.front(); 1413 const int buffer = free_output_buffers_.front();
1287 DVLOGF(3) << "buffer " << buffer; 1414 DVLOGF(3) << "buffer " << buffer;
1288 OutputRecord& output_record = output_buffer_map_[buffer]; 1415 OutputRecord& output_record = output_buffer_map_[buffer];
1289 DCHECK_EQ(output_record.state, kFree); 1416 DCHECK_EQ(output_record.state, kFree);
1290 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1291 DCHECK_NE(output_record.picture_id, -1); 1417 DCHECK_NE(output_record.picture_id, -1);
1292 if (output_record.egl_sync != EGL_NO_SYNC_KHR) { 1418 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1293 TRACE_EVENT0("Video Decoder", 1419 TRACE_EVENT0("Video Decoder",
1294 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR"); 1420 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR");
1295 // If we have to wait for completion, wait. Note that 1421 // If we have to wait for completion, wait. Note that
1296 // free_output_buffers_ is a FIFO queue, so we always wait on the 1422 // free_output_buffers_ is a FIFO queue, so we always wait on the
1297 // buffer that has been in the queue the longest. 1423 // buffer that has been in the queue the longest.
1298 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0, 1424 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
1299 EGL_FOREVER_KHR) == EGL_FALSE) { 1425 EGL_FOREVER_KHR) == EGL_FALSE) {
1300 // This will cause tearing, but is safe otherwise. 1426 // This will cause tearing, but is safe otherwise.
(...skipping 10 matching lines...)
1311 std::unique_ptr<struct v4l2_plane[]> qbuf_planes( 1437 std::unique_ptr<struct v4l2_plane[]> qbuf_planes(
1312 new v4l2_plane[output_planes_count_]); 1438 new v4l2_plane[output_planes_count_]);
1313 memset(&qbuf, 0, sizeof(qbuf)); 1439 memset(&qbuf, 0, sizeof(qbuf));
1314 memset(qbuf_planes.get(), 0, 1440 memset(qbuf_planes.get(), 0,
1315 sizeof(struct v4l2_plane) * output_planes_count_); 1441 sizeof(struct v4l2_plane) * output_planes_count_);
1316 qbuf.index = buffer; 1442 qbuf.index = buffer;
1317 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 1443 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1318 qbuf.memory = V4L2_MEMORY_MMAP; 1444 qbuf.memory = V4L2_MEMORY_MMAP;
1319 qbuf.m.planes = qbuf_planes.get(); 1445 qbuf.m.planes = qbuf_planes.get();
1320 qbuf.length = output_planes_count_; 1446 qbuf.length = output_planes_count_;
1447 DVLOG(2) << "qbuf.index=" << qbuf.index
1448 << ", output_mode_=" << (int)output_mode_
1449 << ", output_planes_count_=" << output_planes_count_;
1321 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 1450 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1322 free_output_buffers_.pop(); 1451 free_output_buffers_.pop_front();
1323 output_record.state = kAtDevice; 1452 output_record.state = kAtDevice;
1324 output_buffer_queued_count_++; 1453 output_buffer_queued_count_++;
1325 return true; 1454 return true;
1326 } 1455 }
1327 1456
1328 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask( 1457 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask(
1329 int32_t picture_buffer_id, 1458 int32_t picture_buffer_id,
1330 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref) { 1459 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref) {
1331 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; 1460 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
1332 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1461 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
(...skipping 27 matching lines...)
1360 } 1489 }
1361 1490
1362 OutputRecord& output_record = output_buffer_map_[index]; 1491 OutputRecord& output_record = output_buffer_map_[index];
1363 if (output_record.state != kAtClient) { 1492 if (output_record.state != kAtClient) {
1364 LOGF(ERROR) << "picture_buffer_id not reusable"; 1493 LOGF(ERROR) << "picture_buffer_id not reusable";
1365 NOTIFY_ERROR(INVALID_ARGUMENT); 1494 NOTIFY_ERROR(INVALID_ARGUMENT);
1366 return; 1495 return;
1367 } 1496 }
1368 1497
1369 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); 1498 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1370 output_record.egl_sync = egl_sync_ref->egl_sync;
1371 output_record.state = kFree; 1499 output_record.state = kFree;
1372 free_output_buffers_.push(index); 1500 free_output_buffers_.push_back(index);
1373 decoder_frames_at_client_--; 1501 decoder_frames_at_client_--;
1374 // Take ownership of the EGLSync. 1502 if (egl_sync_ref) {
1375 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR; 1503 output_record.egl_sync = egl_sync_ref->egl_sync;
1504 // Take ownership of the EGLSync.
1505 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1506 }
1376 // We got a buffer back, so enqueue it back. 1507 // We got a buffer back, so enqueue it back.
1377 Enqueue(); 1508 Enqueue();
1378 } 1509 }
1379 1510
1380 void V4L2VideoDecodeAccelerator::FlushTask() { 1511 void V4L2VideoDecodeAccelerator::FlushTask() {
1381 DVLOGF(3); 1512 DVLOGF(3);
1382 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1513 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1383 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask"); 1514 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask");
1384 1515
1385 // Flush outstanding buffers. 1516 // Flush outstanding buffers.
(...skipping 245 matching lines...)
1631 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); 1762 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1632 output_streamon_ = false; 1763 output_streamon_ = false;
1633 1764
1634 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 1765 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1635 // After streamoff, the device drops ownership of all buffers, even if we 1766 // After streamoff, the device drops ownership of all buffers, even if we
1636 // don't dequeue them explicitly. Some of them may still be owned by the 1767 // don't dequeue them explicitly. Some of them may still be owned by the
1637 // client however. Reuse only those that aren't. 1768 // client however. Reuse only those that aren't.
1638 OutputRecord& output_record = output_buffer_map_[i]; 1769 OutputRecord& output_record = output_buffer_map_[i];
1639 if (output_record.state == kAtDevice) { 1770 if (output_record.state == kAtDevice) {
1640 output_record.state = kFree; 1771 output_record.state = kFree;
1641 free_output_buffers_.push(i); 1772 free_output_buffers_.push_back(i);
1642 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); 1773 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1643 } 1774 }
1644 } 1775 }
1645 output_buffer_queued_count_ = 0; 1776 output_buffer_queued_count_ = 0;
1646 return true; 1777 return true;
1647 } 1778 }
1648 1779
1649 bool V4L2VideoDecodeAccelerator::StopInputStream() { 1780 bool V4L2VideoDecodeAccelerator::StopInputStream() {
1650 DVLOGF(3); 1781 DVLOGF(3);
1651 if (!input_streamon_) 1782 if (!input_streamon_)
(...skipping 49 matching lines...)
1701 void V4L2VideoDecodeAccelerator::FinishResolutionChange() { 1832 void V4L2VideoDecodeAccelerator::FinishResolutionChange() {
1702 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 1833 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1703 DCHECK_EQ(decoder_state_, kChangingResolution); 1834 DCHECK_EQ(decoder_state_, kChangingResolution);
1704 DVLOGF(3); 1835 DVLOGF(3);
1705 1836
1706 if (decoder_state_ == kError) { 1837 if (decoder_state_ == kError) {
1707 DVLOGF(2) << "early out: kError state"; 1838 DVLOGF(2) << "early out: kError state";
1708 return; 1839 return;
1709 } 1840 }
1710 1841
1842 DestroyOutputBuffers();
1843
1711 struct v4l2_format format; 1844 struct v4l2_format format;
1712 bool again; 1845 bool again;
1713 gfx::Size visible_size; 1846 gfx::Size visible_size;
1714 bool ret = GetFormatInfo(&format, &visible_size, &again); 1847 bool ret = GetFormatInfo(&format, &visible_size, &again);
1715 if (!ret || again) { 1848 if (!ret || again) {
1716 LOGF(ERROR) << "Couldn't get format information after resolution change"; 1849 LOGF(ERROR) << "Couldn't get format information after resolution change";
1717 NOTIFY_ERROR(PLATFORM_FAILURE); 1850 NOTIFY_ERROR(PLATFORM_FAILURE);
1718 return; 1851 return;
1719 } 1852 }
1720 1853
(...skipping 275 matching lines...)
1996 LOGF(ERROR) << "Can't find a usable input format from image processor"; 2129 LOGF(ERROR) << "Can't find a usable input format from image processor";
1997 return false; 2130 return false;
1998 } 2131 }
1999 egl_image_format_fourcc_ = FindImageProcessorOutputFormat(); 2132 egl_image_format_fourcc_ = FindImageProcessorOutputFormat();
2000 if (egl_image_format_fourcc_ == 0) { 2133 if (egl_image_format_fourcc_ == 0) {
2001 LOGF(ERROR) << "Can't find a usable output format from image processor"; 2134 LOGF(ERROR) << "Can't find a usable output format from image processor";
2002 return false; 2135 return false;
2003 } 2136 }
2004 egl_image_device_ = image_processor_device_; 2137 egl_image_device_ = image_processor_device_;
2005 } else { 2138 } else {
2139 if (output_mode_ == Config::OutputMode::IMPORT) {
2140 LOGF(ERROR) << "Import mode is unsupported without image processor.";
2141 return false;
2142 }
2006 egl_image_format_fourcc_ = output_format_fourcc_; 2143 egl_image_format_fourcc_ = output_format_fourcc_;
2007 egl_image_device_ = device_; 2144 egl_image_device_ = device_;
2008 } 2145 }
2009 DVLOGF(2) << "Output format=" << output_format_fourcc_; 2146 DVLOGF(2) << "Output format=" << output_format_fourcc_;
2010 2147
2011 // Just set the fourcc for output; resolution, etc., will come from the 2148 // Just set the fourcc for output; resolution, etc., will come from the
2012 // driver once it extracts it from the stream. 2149 // driver once it extracts it from the stream.
2013 memset(&format, 0, sizeof(format)); 2150 memset(&format, 0, sizeof(format));
2014 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2151 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2015 format.fmt.pix_mp.pixelformat = output_format_fourcc_; 2152 format.fmt.pix_mp.pixelformat = output_format_fourcc_;
(...skipping 46 matching lines...)
2062 for (uint32_t processor_output_format : processor_output_formats) { 2199 for (uint32_t processor_output_format : processor_output_formats) {
2063 if (device_->CanCreateEGLImageFrom(processor_output_format)) { 2200 if (device_->CanCreateEGLImageFrom(processor_output_format)) {
2064 DVLOGF(1) << "Image processor output format=" << processor_output_format; 2201 DVLOGF(1) << "Image processor output format=" << processor_output_format;
2065 return processor_output_format; 2202 return processor_output_format;
2066 } 2203 }
2067 } 2204 }
2068 2205
2069 return 0; 2206 return 0;
2070 } 2207 }
2071 2208
2209 void V4L2VideoDecodeAccelerator::CreateImageProcessor() {
2210 DVLOGF(3);
2211 image_processor_.reset(new V4L2ImageProcessor(image_processor_device_));
2212 v4l2_memory output_memory_type =
2213 (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP
2214 : V4L2_MEMORY_DMABUF);
2215 // Unretained is safe because |this| owns image processor and there will be
2216 // no callbacks after the processor is destroyed.
2217 if (!image_processor_->Initialize(
2218 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
2219 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_),
2220 V4L2_MEMORY_DMABUF, output_memory_type, visible_size_, coded_size_,
2221 visible_size_, egl_image_size_, output_buffer_map_.size(),
2222 base::Bind(&V4L2VideoDecodeAccelerator::ImageProcessorError,
2223 base::Unretained(this)))) {
2224 LOGF(ERROR) << "Initialize image processor failed";
2225 NOTIFY_ERROR(PLATFORM_FAILURE);
2226 return;
2227 }
2228 DCHECK(image_processor_->output_allocated_size() == egl_image_size_);
2229 DVLOGF(3) << "image_processor_->output_allocated_size()="
2230 << image_processor_->output_allocated_size().ToString();
2231 if (image_processor_->input_allocated_size() != coded_size_) {
2232 LOGF(ERROR) << "Image processor should be able to take the output coded "
2233 << "size of decoder " << coded_size_.ToString()
2234 << " without adjusting to "
2235 << image_processor_->input_allocated_size().ToString();
2236 NOTIFY_ERROR(PLATFORM_FAILURE);
2237 return;
2238 }
2239 }
2240
2241 void V4L2VideoDecodeAccelerator::FrameProcessed(int32_t bitstream_buffer_id,
2242 int output_buffer_index) {
2243 DVLOGF(3) << "output_buffer_index=" << output_buffer_index
2244 << ", bitstream_buffer_id=" << bitstream_buffer_id;
2245 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2246 DCHECK_GE(output_buffer_index, 0);
2247 DCHECK_LT(output_buffer_index, static_cast<int>(output_buffer_map_.size()));
2248
2249 OutputRecord& output_record = output_buffer_map_[output_buffer_index];
2250 DCHECK_EQ(output_record.state, kAtProcessor);
2251 if (!image_processor_bitstream_buffer_ids_.empty() &&
2252 image_processor_bitstream_buffer_ids_.front() == bitstream_buffer_id) {
2253 DVLOGF(3) << "picture_id=" << output_record.picture_id;
2254 DCHECK_NE(output_record.picture_id, -1);
2255 // Send the processed frame to render.
2256 output_record.state = kAtClient;
2257 decoder_frames_at_client_++;
2258 image_processor_bitstream_buffer_ids_.pop();
2259 const Picture picture(output_record.picture_id, bitstream_buffer_id,
2260 gfx::Rect(visible_size_), false);
2261 pending_picture_ready_.push(PictureRecord(output_record.cleared, picture));
2262 SendPictureReady();
2263 output_record.cleared = true;
2264 // Flush or resolution change may be waiting for image processor to finish.
2265 if (image_processor_bitstream_buffer_ids_.empty()) {
2266 NotifyFlushDoneIfNeeded();
2267 if (decoder_state_ == kChangingResolution)
2268 StartResolutionChange();
2269 }
2270 } else {
2271 DVLOGF(2) << "Bitstream buffer id " << bitstream_buffer_id << " not found "
2272 << "because of Reset. Drop the buffer";
2273 output_record.state = kFree;
2274 free_output_buffers_.push_back(output_buffer_index);
2275 // Do not queue the buffer if a resolution change is in progress. The queue
2276 // is about to be destroyed anyway. Otherwise, the queue will be started in
2277 // Enqueue and REQBUFS(0) will fail.
2278 if (decoder_state_ != kChangingResolution)
2279 Enqueue();
2280 }
2281 }
2282
2283 void V4L2VideoDecodeAccelerator::ImageProcessorError() {
2284 LOGF(ERROR) << "Image processor error";
2285 NOTIFY_ERROR(PLATFORM_FAILURE);
2286 }
2287
2072 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() { 2288 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
2073 DVLOGF(3); 2289 DVLOGF(3);
2074 DCHECK(decoder_state_ == kInitialized || 2290 DCHECK(decoder_state_ == kInitialized ||
2075 decoder_state_ == kChangingResolution); 2291 decoder_state_ == kChangingResolution);
2076 DCHECK(!output_streamon_); 2292 DCHECK(!output_streamon_);
2077 DCHECK(output_buffer_map_.empty()); 2293 DCHECK(output_buffer_map_.empty());
2078 2294
2079 // Number of output buffers we need. 2295 // Number of output buffers we need.
2080 struct v4l2_control ctrl; 2296 struct v4l2_control ctrl;
2081 memset(&ctrl, 0, sizeof(ctrl)); 2297 memset(&ctrl, 0, sizeof(ctrl));
2082 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE; 2298 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
2083 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl); 2299 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
2084 output_dpb_size_ = ctrl.value; 2300 output_dpb_size_ = ctrl.value;
2085 2301
2086 // Output format setup in Initialize(). 2302 // Output format setup in Initialize().
2087 2303
2088 const uint32_t buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount; 2304 const uint32_t buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount;
2089 DVLOGF(3) << "buffer_count=" << buffer_count 2305 DVLOGF(3) << "buffer_count=" << buffer_count
2090 << ", coded_size=" << egl_image_size_.ToString(); 2306 << ", coded_size=" << egl_image_size_.ToString();
2091 2307
2308 // With ALLOCATE mode the client can sample it as RGB and doesn't need to
2309 // know the precise format.
2310 VideoPixelFormat pixel_format =
2311 (output_mode_ == Config::OutputMode::IMPORT)
2312 ? V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_)
2313 : PIXEL_FORMAT_UNKNOWN;
2314
2092 child_task_runner_->PostTask( 2315 child_task_runner_->PostTask(
2093 FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_, 2316 FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_,
2094 buffer_count, PIXEL_FORMAT_UNKNOWN, 1, 2317 buffer_count, pixel_format, 1, egl_image_size_,
2095 egl_image_size_, device_->GetTextureTarget())); 2318 device_->GetTextureTarget()));
2096 2319
2097 // Go into kAwaitingPictureBuffers to prevent us from doing any more decoding 2320 // Go into kAwaitingPictureBuffers to prevent us from doing any more decoding
2098 // or event handling while we are waiting for AssignPictureBuffers(). Not 2321 // or event handling while we are waiting for AssignPictureBuffers(). Not
2099 // having Pictures available would not have prevented us from making decoding 2322 // having Pictures available would not have prevented us from making decoding
2100 // progress entirely e.g. in the case of H.264 where we could further decode 2323 // progress entirely e.g. in the case of H.264 where we could further decode
2101 // non-slice NALUs and could even get another resolution change before we were 2324 // non-slice NALUs and could even get another resolution change before we were
2102 // done with this one. After we get the buffers, we'll go back into kIdle and 2325 // done with this one. After we get the buffers, we'll go back into kIdle and
2103 // kick off further event processing, and eventually go back into kDecoding 2326 // kick off further event processing, and eventually go back into kDecoding
2104 // once no more events are pending (if any). 2327 // once no more events are pending (if any).
2105 decoder_state_ = kAwaitingPictureBuffers; 2328 decoder_state_ = kAwaitingPictureBuffers;
(...skipping 18 matching lines...)
2124 reqbufs.count = 0; 2347 reqbufs.count = 0;
2125 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 2348 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2126 reqbufs.memory = V4L2_MEMORY_MMAP; 2349 reqbufs.memory = V4L2_MEMORY_MMAP;
2127 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); 2350 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
2128 2351
2129 input_buffer_map_.clear(); 2352 input_buffer_map_.clear();
2130 free_input_buffers_.clear(); 2353 free_input_buffers_.clear();
2131 } 2354 }
2132 2355
2133 bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() { 2356 bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
2357 struct v4l2_requestbuffers reqbufs;
2358 memset(&reqbufs, 0, sizeof(reqbufs));
2359 reqbufs.count = 0;
2360 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2361 reqbufs.memory = V4L2_MEMORY_MMAP;
2362 if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
2363 PLOGF(ERROR) << "ioctl() failed: VIDIOC_REQBUFS";
2364 NOTIFY_ERROR(PLATFORM_FAILURE);
2365 return false;
2366 }
2367
2368 output_buffer_map_.clear();
2369 while (!free_output_buffers_.empty())
2370 free_output_buffers_.pop_front();
2371 output_buffer_queued_count_ = 0;
2372 // The client may still hold some buffers. The texture holds a reference to
2373 // the buffer. It is OK to free the buffer and destroy EGLImage here.
2374 decoder_frames_at_client_ = 0;
2375 return true;
2376 }
2377
2378 bool V4L2VideoDecodeAccelerator::DestroyEGLImages() {
2134 DVLOGF(3); 2379 DVLOGF(3);
2135 DCHECK(child_task_runner_->BelongsToCurrentThread()); 2380 DCHECK(child_task_runner_->BelongsToCurrentThread());
2136 DCHECK(!output_streamon_); 2381 DCHECK(!output_streamon_);
2137 bool success = true; 2382 bool success = true;
2138 2383
2139 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 2384 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
2140 OutputRecord& output_record = output_buffer_map_[i]; 2385 OutputRecord& output_record = output_buffer_map_[i];
2141 2386
2142 if (output_record.egl_image != EGL_NO_IMAGE_KHR) { 2387 if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
2143 if (egl_image_device_->DestroyEGLImage( 2388 if (egl_image_device_->DestroyEGLImage(
2144 egl_display_, output_record.egl_image) != EGL_TRUE) { 2389 egl_display_, output_record.egl_image) != EGL_TRUE) {
2145 DVLOGF(1) << "DestroyEGLImage failed."; 2390 DVLOGF(1) << "DestroyEGLImage failed.";
2146 success = false; 2391 success = false;
2147 } 2392 }
2148 } 2393 }
2149 2394
2150 if (output_record.egl_sync != EGL_NO_SYNC_KHR) { 2395 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
2151 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) { 2396 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
2152 DVLOGF(1) << "eglDestroySyncKHR failed."; 2397 DVLOGF(1) << "eglDestroySyncKHR failed.";
2153 success = false; 2398 success = false;
2154 } 2399 }
2155 } 2400 }
2156 2401
2157 DVLOGF(1) << "dismissing PictureBuffer id=" << output_record.picture_id; 2402 DVLOGF(1) << "dismissing PictureBuffer id=" << output_record.picture_id;
2158 child_task_runner_->PostTask( 2403 child_task_runner_->PostTask(
2159 FROM_HERE, base::Bind(&Client::DismissPictureBuffer, client_, 2404 FROM_HERE, base::Bind(&Client::DismissPictureBuffer, client_,
2160 output_record.picture_id)); 2405 output_record.picture_id));
2161 } 2406 }
2162 2407
2163 struct v4l2_requestbuffers reqbufs;
2164 memset(&reqbufs, 0, sizeof(reqbufs));
2165 reqbufs.count = 0;
2166 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2167 reqbufs.memory = V4L2_MEMORY_MMAP;
2168 if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
2169 PLOGF(ERROR) << "ioctl() failed: VIDIOC_REQBUFS";
2170 success = false;
2171 }
2172
2173 output_buffer_map_.clear();
2174 while (!free_output_buffers_.empty())
2175 free_output_buffers_.pop();
2176 output_buffer_queued_count_ = 0;
2177 // The client may still hold some buffers. The texture holds a reference to
2178 // the buffer. It is OK to free the buffer and destroy EGLImage here.
2179 decoder_frames_at_client_ = 0;
2180
2181 return success; 2408 return success;
2182 } 2409 }
2183 2410
2184 void V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers() { 2411 void V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers() {
2185 DCHECK(child_task_runner_->BelongsToCurrentThread()); 2412 DCHECK(child_task_runner_->BelongsToCurrentThread());
2186 DVLOGF(3); 2413 DVLOGF(3);
2187 2414
2188 if (!DestroyOutputBuffers()) { 2415 if (!DestroyEGLImages()) {
2189 LOGF(ERROR) << "Failed destroying output buffers."; 2416 LOGF(ERROR) << "Failed destroying output buffers.";
2190 NOTIFY_ERROR(PLATFORM_FAILURE); 2417 NOTIFY_ERROR(PLATFORM_FAILURE);
2191 return; 2418 return;
2192 } 2419 }
2193 2420
2194 // Finish resolution change on decoder thread. 2421 // Finish resolution change on decoder thread.
2195 decoder_thread_.task_runner()->PostTask( 2422 decoder_thread_.task_runner()->PostTask(
2196 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::FinishResolutionChange, 2423 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::FinishResolutionChange,
2197 base::Unretained(this))); 2424 base::Unretained(this)));
2198 } 2425 }
(...skipping 41 matching lines...)
2240 } 2467 }
2241 2468
2242 void V4L2VideoDecodeAccelerator::PictureCleared() { 2469 void V4L2VideoDecodeAccelerator::PictureCleared() {
2243 DVLOGF(3) << "clearing count=" << picture_clearing_count_; 2470 DVLOGF(3) << "clearing count=" << picture_clearing_count_;
2244 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 2471 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2245 DCHECK_GT(picture_clearing_count_, 0); 2472 DCHECK_GT(picture_clearing_count_, 0);
2246 picture_clearing_count_--; 2473 picture_clearing_count_--;
2247 SendPictureReady(); 2474 SendPictureReady();
2248 } 2475 }
2249 2476
2250 void V4L2VideoDecodeAccelerator::FrameProcessed(int32_t bitstream_buffer_id,
2251 int output_buffer_index) {
2252 DVLOGF(3) << "output_buffer_index=" << output_buffer_index
2253 << ", bitstream_buffer_id=" << bitstream_buffer_id;
2254 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2255 DCHECK_GE(output_buffer_index, 0);
2256 DCHECK_LT(output_buffer_index, static_cast<int>(output_buffer_map_.size()));
2257
2258 OutputRecord& output_record = output_buffer_map_[output_buffer_index];
2259 DCHECK_EQ(output_record.state, kAtProcessor);
2260 if (!image_processor_bitstream_buffer_ids_.empty() &&
2261 image_processor_bitstream_buffer_ids_.front() == bitstream_buffer_id) {
2262 DVLOGF(3) << "picture_id=" << output_record.picture_id;
2263 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
2264 DCHECK_NE(output_record.picture_id, -1);
2265 // Send the processed frame to render.
2266 output_record.state = kAtClient;
2267 decoder_frames_at_client_++;
2268 image_processor_bitstream_buffer_ids_.pop();
2269 const Picture picture(output_record.picture_id, bitstream_buffer_id,
2270 gfx::Rect(visible_size_), false);
2271 pending_picture_ready_.push(PictureRecord(output_record.cleared, picture));
2272 SendPictureReady();
2273 output_record.cleared = true;
2274 // Flush or resolution change may be waiting image processor to finish.
2275 if (image_processor_bitstream_buffer_ids_.empty()) {
2276 NotifyFlushDoneIfNeeded();
2277 if (decoder_state_ == kChangingResolution)
2278 StartResolutionChange();
2279 }
2280 } else {
2281 DVLOGF(2) << "Bitstream buffer id " << bitstream_buffer_id << " not found "
2282 << "because of Reset. Drop the buffer";
2283 output_record.state = kFree;
2284 free_output_buffers_.push(output_buffer_index);
2285 // Do not queue the buffer if a resolution change is in progress. The queue
2286 // is about to be destroyed anyway. Otherwise, the queue will be started in
2287 // Enqueue and REQBUFS(0) will fail.
2288 if (decoder_state_ != kChangingResolution)
2289 Enqueue();
2290 }
2291 }
2292
2293 void V4L2VideoDecodeAccelerator::ImageProcessorError() {
2294 LOGF(ERROR) << "Image processor error";
2295 NOTIFY_ERROR(PLATFORM_FAILURE);
2296 }
2297
2298 } // namespace media 2477 } // namespace media