OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/gpu/v4l2_video_decode_accelerator.h" | 5 #include "media/gpu/v4l2_video_decode_accelerator.h" |
6 | 6 |
7 #include <dlfcn.h> | 7 #include <dlfcn.h> |
8 #include <errno.h> | 8 #include <errno.h> |
9 #include <fcntl.h> | 9 #include <fcntl.h> |
10 #include <linux/videodev2.h> | 10 #include <linux/videodev2.h> |
11 #include <poll.h> | 11 #include <poll.h> |
12 #include <string.h> | 12 #include <string.h> |
13 #include <sys/eventfd.h> | 13 #include <sys/eventfd.h> |
14 #include <sys/ioctl.h> | 14 #include <sys/ioctl.h> |
15 #include <sys/mman.h> | 15 #include <sys/mman.h> |
16 | 16 |
17 #include "base/bind.h" | 17 #include "base/bind.h" |
18 #include "base/command_line.h" | 18 #include "base/command_line.h" |
19 #include "base/message_loop/message_loop.h" | 19 #include "base/message_loop/message_loop.h" |
20 #include "base/numerics/safe_conversions.h" | 20 #include "base/numerics/safe_conversions.h" |
| 21 #include "base/posix/eintr_wrapper.h" |
21 #include "base/single_thread_task_runner.h" | 22 #include "base/single_thread_task_runner.h" |
22 #include "base/threading/thread_task_runner_handle.h" | 23 #include "base/threading/thread_task_runner_handle.h" |
23 #include "base/trace_event/trace_event.h" | 24 #include "base/trace_event/trace_event.h" |
24 #include "build/build_config.h" | 25 #include "build/build_config.h" |
25 #include "media/base/media_switches.h" | 26 #include "media/base/media_switches.h" |
26 #include "media/filters/h264_parser.h" | 27 #include "media/filters/h264_parser.h" |
27 #include "media/gpu/shared_memory_region.h" | 28 #include "media/gpu/shared_memory_region.h" |
28 #include "ui/gfx/geometry/rect.h" | 29 #include "ui/gfx/geometry/rect.h" |
29 #include "ui/gl/gl_context.h" | 30 #include "ui/gl/gl_context.h" |
30 #include "ui/gl/scoped_binders.h" | 31 #include "ui/gl/scoped_binders.h" |
(...skipping 99 matching lines...)
130 V4L2VideoDecodeAccelerator::InputRecord::InputRecord() | 131 V4L2VideoDecodeAccelerator::InputRecord::InputRecord() |
131 : at_device(false), address(NULL), length(0), bytes_used(0), input_id(-1) {} | 132 : at_device(false), address(NULL), length(0), bytes_used(0), input_id(-1) {} |
132 | 133 |
133 V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {} | 134 V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {} |
134 | 135 |
135 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord() | 136 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord() |
136 : state(kFree), | 137 : state(kFree), |
137 egl_image(EGL_NO_IMAGE_KHR), | 138 egl_image(EGL_NO_IMAGE_KHR), |
138 egl_sync(EGL_NO_SYNC_KHR), | 139 egl_sync(EGL_NO_SYNC_KHR), |
139 picture_id(-1), | 140 picture_id(-1), |
| 141 texture_id(0), |
140 cleared(false) {} | 142 cleared(false) {} |
141 | 143 |
142 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {} | 144 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {} |
143 | 145 |
144 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(bool cleared, | 146 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(bool cleared, |
145 const Picture& picture) | 147 const Picture& picture) |
146 : cleared(cleared), picture(picture) {} | 148 : cleared(cleared), picture(picture) {} |
147 | 149 |
148 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {} | 150 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {} |
149 | 151 |
150 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator( | 152 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator( |
151 EGLDisplay egl_display, | 153 EGLDisplay egl_display, |
152 const GetGLContextCallback& get_gl_context_cb, | 154 const GetGLContextCallback& get_gl_context_cb, |
153 const MakeGLContextCurrentCallback& make_context_current_cb, | 155 const MakeGLContextCurrentCallback& make_context_current_cb, |
154 const scoped_refptr<V4L2Device>& device) | 156 const scoped_refptr<V4L2Device>& device) |
155 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()), | 157 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
156 decoder_thread_("V4L2DecoderThread"), | 158 decoder_thread_("V4L2DecoderThread"), |
157 decoder_state_(kUninitialized), | 159 decoder_state_(kUninitialized), |
| 160 output_mode_(Config::OutputMode::ALLOCATE), |
158 device_(device), | 161 device_(device), |
159 decoder_delay_bitstream_buffer_id_(-1), | 162 decoder_delay_bitstream_buffer_id_(-1), |
160 decoder_current_input_buffer_(-1), | 163 decoder_current_input_buffer_(-1), |
161 decoder_decode_buffer_tasks_scheduled_(0), | 164 decoder_decode_buffer_tasks_scheduled_(0), |
162 decoder_frames_at_client_(0), | 165 decoder_frames_at_client_(0), |
163 decoder_flushing_(false), | 166 decoder_flushing_(false), |
164 reset_pending_(false), | 167 reset_pending_(false), |
165 decoder_partial_frame_pending_(false), | 168 decoder_partial_frame_pending_(false), |
166 input_streamon_(false), | 169 input_streamon_(false), |
167 input_buffer_queued_count_(0), | 170 input_buffer_queued_count_(0), |
(...skipping 19 matching lines...)
187 DCHECK(!device_poll_thread_.IsRunning()); | 190 DCHECK(!device_poll_thread_.IsRunning()); |
188 | 191 |
189 // These maps have members that should be manually destroyed, e.g. file | 192 // These maps have members that should be manually destroyed, e.g. file |
190 // descriptors, mmap() segments, etc. | 193 // descriptors, mmap() segments, etc. |
191 DCHECK(input_buffer_map_.empty()); | 194 DCHECK(input_buffer_map_.empty()); |
192 DCHECK(output_buffer_map_.empty()); | 195 DCHECK(output_buffer_map_.empty()); |
193 } | 196 } |
194 | 197 |
195 bool V4L2VideoDecodeAccelerator::Initialize(const Config& config, | 198 bool V4L2VideoDecodeAccelerator::Initialize(const Config& config, |
196 Client* client) { | 199 Client* client) { |
197 DVLOGF(3) << "profile: " << config.profile; | 200 DVLOGF(3) << "profile: " << config.profile |
| 201 << ", output_mode=" << static_cast<int>(config.output_mode); |
198 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 202 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
199 DCHECK_EQ(decoder_state_, kUninitialized); | 203 DCHECK_EQ(decoder_state_, kUninitialized); |
200 | 204 |
201 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( | 205 if (!device_->SupportsDecodeProfileForV4L2PixelFormats( |
202 config.profile, arraysize(supported_input_fourccs_), | 206 config.profile, arraysize(supported_input_fourccs_), |
203 supported_input_fourccs_)) { | 207 supported_input_fourccs_)) { |
204 DVLOGF(1) << "unsupported profile=" << config.profile; | 208 DVLOGF(1) << "unsupported profile=" << config.profile; |
205 return false; | 209 return false; |
206 } | 210 } |
207 | 211 |
208 if (config.is_encrypted) { | 212 if (config.is_encrypted) { |
209 NOTREACHED() << "Encrypted streams are not supported for this VDA"; | 213 NOTREACHED() << "Encrypted streams are not supported for this VDA"; |
210 return false; | 214 return false; |
211 } | 215 } |
212 | 216 |
213 if (config.output_mode != Config::OutputMode::ALLOCATE) { | 217 if (config.output_mode != Config::OutputMode::ALLOCATE && |
214 NOTREACHED() << "Only ALLOCATE OutputMode is supported by this VDA"; | 218 config.output_mode != Config::OutputMode::IMPORT) { |
| 219 NOTREACHED() << "Only ALLOCATE and IMPORT OutputModes are supported"; |
215 return false; | 220 return false; |
216 } | 221 } |
217 | 222 |
218 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) { | |
219 NOTREACHED() << "GL callbacks are required for this VDA"; | |
220 return false; | |
221 } | |
222 | |
223 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); | 223 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); |
224 client_ = client_ptr_factory_->GetWeakPtr(); | 224 client_ = client_ptr_factory_->GetWeakPtr(); |
225 // If we haven't been set up to decode on separate thread via | 225 // If we haven't been set up to decode on separate thread via |
226 // TryToSetupDecodeOnSeparateThread(), use the main thread/client for | 226 // TryToSetupDecodeOnSeparateThread(), use the main thread/client for |
227 // decode tasks. | 227 // decode tasks. |
228 if (!decode_task_runner_) { | 228 if (!decode_task_runner_) { |
229 decode_task_runner_ = child_task_runner_; | 229 decode_task_runner_ = child_task_runner_; |
230 DCHECK(!decode_client_); | 230 DCHECK(!decode_client_); |
231 decode_client_ = client_; | 231 decode_client_ = client_; |
232 } | 232 } |
233 | 233 |
234 video_profile_ = config.profile; | 234 video_profile_ = config.profile; |
235 | 235 |
236 if (egl_display_ == EGL_NO_DISPLAY) { | 236 if (egl_display_ == EGL_NO_DISPLAY) { |
237 LOGF(ERROR) << "could not get EGLDisplay"; | 237 LOGF(ERROR) << "could not get EGLDisplay"; |
238 return false; | 238 return false; |
239 } | 239 } |
240 | 240 |
241 // We need the context to be initialized to query extensions. | 241 // We need the context to be initialized to query extensions. |
242 if (!make_context_current_cb_.Run()) { | 242 if (!make_context_current_cb_.is_null()) { |
243 LOGF(ERROR) << "could not make context current"; | 243 if (!make_context_current_cb_.Run()) { |
244 return false; | 244 LOGF(ERROR) << "could not make context current"; |
245 } | 245 return false; |
| 246 } |
246 | 247 |
247 // TODO(posciak): crbug.com/450898. | 248 // TODO(posciak): crbug.com/450898. |
248 #if defined(ARCH_CPU_ARMEL) | 249 #if defined(ARCH_CPU_ARMEL) |
249 if (!gl::g_driver_egl.ext.b_EGL_KHR_fence_sync) { | 250 if (!gl::g_driver_egl.ext.b_EGL_KHR_fence_sync) { |
250 LOGF(ERROR) << "context does not have EGL_KHR_fence_sync"; | 251 LOGF(ERROR) << "context does not have EGL_KHR_fence_sync"; |
251 return false; | 252 return false; |
| 253 } |
| 254 #endif |
| 255 } else { |
| 256 DVLOGF(1) << "No GL callbacks provided, initializing without GL support"; |
252 } | 257 } |
253 #endif | |
254 | 258 |
255 // Capabilities check. | 259 // Capabilities check. |
256 struct v4l2_capability caps; | 260 struct v4l2_capability caps; |
257 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING; | 261 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING; |
258 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); | 262 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); |
259 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { | 263 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
260 LOGF(ERROR) << "ioctl() failed: VIDIOC_QUERYCAP" | 264 LOGF(ERROR) << "ioctl() failed: VIDIOC_QUERYCAP" |
261 << ", caps check failed: 0x" << std::hex << caps.capabilities; | 265 << ", caps check failed: 0x" << std::hex << caps.capabilities; |
262 return false; | 266 return false; |
263 } | 267 } |
(...skipping 13 matching lines...)
277 | 281 |
278 if (!CreateInputBuffers()) | 282 if (!CreateInputBuffers()) |
279 return false; | 283 return false; |
280 | 284 |
281 if (!decoder_thread_.Start()) { | 285 if (!decoder_thread_.Start()) { |
282 LOGF(ERROR) << "decoder thread failed to start"; | 286 LOGF(ERROR) << "decoder thread failed to start"; |
283 return false; | 287 return false; |
284 } | 288 } |
285 | 289 |
286 decoder_state_ = kInitialized; | 290 decoder_state_ = kInitialized; |
| 291 output_mode_ = config.output_mode; |
287 | 292 |
288 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here. | 293 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here. |
289 decoder_thread_.task_runner()->PostTask( | 294 decoder_thread_.task_runner()->PostTask( |
290 FROM_HERE, base::Bind(base::IgnoreResult( | 295 FROM_HERE, base::Bind(base::IgnoreResult( |
291 &V4L2VideoDecodeAccelerator::StartDevicePoll), | 296 &V4L2VideoDecodeAccelerator::StartDevicePoll), |
292 base::Unretained(this))); | 297 base::Unretained(this))); |
293 | 298 |
294 return true; | 299 return true; |
295 } | 300 } |
296 | 301 |
(...skipping 51 matching lines...)
348 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 353 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
349 reqbufs.memory = V4L2_MEMORY_MMAP; | 354 reqbufs.memory = V4L2_MEMORY_MMAP; |
350 IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs); | 355 IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs); |
351 | 356 |
352 if (reqbufs.count != buffers.size()) { | 357 if (reqbufs.count != buffers.size()) { |
353 DLOGF(ERROR) << "Could not allocate enough output buffers"; | 358 DLOGF(ERROR) << "Could not allocate enough output buffers"; |
354 NOTIFY_ERROR(PLATFORM_FAILURE); | 359 NOTIFY_ERROR(PLATFORM_FAILURE); |
355 return; | 360 return; |
356 } | 361 } |
357 | 362 |
358 if (image_processor_device_) { | 363 DCHECK(free_output_buffers_.empty()); |
359 DCHECK(!image_processor_); | 364 DCHECK(output_buffer_map_.empty()); |
360 image_processor_.reset(new V4L2ImageProcessor(image_processor_device_)); | 365 output_buffer_map_.resize(buffers.size()); |
361 // Unretained is safe because |this| owns image processor and there will be | 366 if (image_processor_device_ && output_mode_ == Config::OutputMode::ALLOCATE) { |
362 // no callbacks after processor destroys. | 367 if (!CreateImageProcessor()) |
363 if (!image_processor_->Initialize( | 368 return; |
364 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_), | 369 } |
365 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_), | 370 |
366 V4L2_MEMORY_DMABUF, visible_size_, coded_size_, visible_size_, | 371 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { |
367 visible_size_, buffers.size(), | 372 DCHECK(buffers[i].size() == egl_image_size_); |
368 base::Bind(&V4L2VideoDecodeAccelerator::ImageProcessorError, | 373 |
369 base::Unretained(this)))) { | 374 OutputRecord& output_record = output_buffer_map_[i]; |
370 LOGF(ERROR) << "Initialize image processor failed"; | 375 DCHECK_EQ(output_record.state, kFree); |
371 NOTIFY_ERROR(PLATFORM_FAILURE); | 376 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR); |
| 377 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); |
| 378 DCHECK_EQ(output_record.picture_id, -1); |
| 379 DCHECK_EQ(output_record.cleared, false); |
| 380 DCHECK_EQ(1u, buffers[i].texture_ids().size()); |
| 381 DCHECK(output_record.processor_input_fds.empty()); |
| 382 |
| 383 output_record.picture_id = buffers[i].id(); |
| 384 output_record.texture_id = buffers[i].texture_ids()[0]; |
| 385 // This will remain kAtClient until ImportBufferForPicture is called, either |
| 386 // by the client, or by ourselves, if we are allocating. |
| 387 output_record.state = kAtClient; |
| 388 |
| 389 if (image_processor_device_) { |
| 390 std::vector<base::ScopedFD> dmabuf_fds = device_->GetDmabufsForV4L2Buffer( |
| 391 i, output_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); |
| 392 if (dmabuf_fds.empty()) { |
| 393 LOGF(ERROR) << "Failed to get DMABUFs of decoder."; |
| 394 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 395 return; |
| 396 } |
| 397 output_record.processor_input_fds = std::move(dmabuf_fds); |
| 398 } |
| 399 |
| 400 if (output_mode_ == Config::OutputMode::ALLOCATE) { |
| 401 std::vector<base::ScopedFD> dmabuf_fds; |
| 402 dmabuf_fds = egl_image_device_->GetDmabufsForV4L2Buffer( |
| 403 i, egl_image_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); |
| 404 if (dmabuf_fds.empty()) { |
| 405 LOGF(ERROR) << "Failed to get DMABUFs for EGLImage."; |
| 406 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 407 return; |
| 408 } |
| 409 int plane_horiz_bits_per_pixel = VideoFrame::PlaneHorizontalBitsPerPixel( |
| 410 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_), |
| 411 0); |
| 412 ImportBufferForPictureTask( |
| 413 output_record.picture_id, std::move(dmabuf_fds), |
| 414 egl_image_size_.width() * plane_horiz_bits_per_pixel / 8); |
| 415 } // else we'll get triggered via ImportBufferForPicture() from client. |
| 416 |
| 417 DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id; |
| 418 } |
| 419 |
| 420 if (output_mode_ == Config::OutputMode::ALLOCATE) { |
| 421 DCHECK_EQ(kAwaitingPictureBuffers, decoder_state_); |
| 422 DVLOGF(1) << "Change state to kDecoding"; |
| 423 decoder_state_ = kDecoding; |
| 424 if (reset_pending_) { |
| 425 FinishReset(); |
372 return; | 426 return; |
373 } | 427 } |
374 DCHECK(image_processor_->output_allocated_size() == egl_image_size_); | 428 ScheduleDecodeBufferTaskIfNeeded(); |
375 if (image_processor_->input_allocated_size() != coded_size_) { | |
376 LOGF(ERROR) << "Image processor should be able to take the output coded " | |
377 << "size of decoder " << coded_size_.ToString() | |
378 << " without adjusting to " | |
379 << image_processor_->input_allocated_size().ToString(); | |
380 NOTIFY_ERROR(PLATFORM_FAILURE); | |
381 return; | |
382 } | |
383 } | 429 } |
384 | |
385 child_task_runner_->PostTask( | |
386 FROM_HERE, | |
387 base::Bind(&V4L2VideoDecodeAccelerator::CreateEGLImages, weak_this_, | |
388 buffers, egl_image_format_fourcc_, egl_image_planes_count_)); | |
389 } | 430 } |
390 | 431 |
391 void V4L2VideoDecodeAccelerator::CreateEGLImages( | 432 void V4L2VideoDecodeAccelerator::CreateEGLImageFor( |
392 const std::vector<media::PictureBuffer>& buffers, | 433 size_t buffer_index, |
393 uint32_t output_format_fourcc, | 434 int32_t picture_buffer_id, |
394 size_t output_planes_count) { | 435 std::vector<base::ScopedFD> dmabuf_fds, |
395 DVLOGF(3); | 436 GLuint texture_id, |
| 437 const gfx::Size& size, |
| 438 uint32_t fourcc) { |
| 439 DVLOGF(3) << "index=" << buffer_index; |
396 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 440 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
397 | 441 |
398 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) { | 442 if (get_gl_context_cb_.is_null() || make_context_current_cb_.is_null()) { |
399 DLOGF(ERROR) << "GL callbacks required for binding to EGLImages"; | 443 DLOGF(ERROR) << "GL callbacks required for binding to EGLImages"; |
400 NOTIFY_ERROR(INVALID_ARGUMENT); | 444 NOTIFY_ERROR(INVALID_ARGUMENT); |
401 return; | 445 return; |
402 } | 446 } |
403 | 447 |
404 gl::GLContext* gl_context = get_gl_context_cb_.Run(); | 448 gl::GLContext* gl_context = get_gl_context_cb_.Run(); |
405 if (!gl_context || !make_context_current_cb_.Run()) { | 449 if (!gl_context || !make_context_current_cb_.Run()) { |
406 DLOGF(ERROR) << "No GL context"; | 450 DLOGF(ERROR) << "No GL context"; |
407 NOTIFY_ERROR(PLATFORM_FAILURE); | 451 NOTIFY_ERROR(PLATFORM_FAILURE); |
408 return; | 452 return; |
409 } | 453 } |
410 | 454 |
411 gl::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); | 455 gl::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0); |
412 | 456 |
413 std::vector<EGLImageKHR> egl_images; | 457 EGLImageKHR egl_image = egl_image_device_->CreateEGLImage( |
414 for (size_t i = 0; i < buffers.size(); ++i) { | 458 egl_display_, gl_context->GetHandle(), texture_id, size, buffer_index, |
415 std::vector<base::ScopedFD> dmabuf_fds; | 459 fourcc, dmabuf_fds); |
416 dmabuf_fds = egl_image_device_->GetDmabufsForV4L2Buffer( | 460 if (egl_image == EGL_NO_IMAGE_KHR) { |
417 i, egl_image_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); | 461 LOGF(ERROR) << "could not create EGLImageKHR," |
418 if (dmabuf_fds.empty()) { | 462 << " index=" << buffer_index << " texture_id=" << texture_id; |
419 LOGF(ERROR) << "Failed to get DMABUFs for EGLImage."; | 463 NOTIFY_ERROR(PLATFORM_FAILURE); |
420 NOTIFY_ERROR(PLATFORM_FAILURE); | 464 return; |
421 return; | |
422 } | |
423 | |
424 EGLImageKHR egl_image = egl_image_device_->CreateEGLImage( | |
425 egl_display_, gl_context->GetHandle(), buffers[i].texture_ids()[0], | |
426 buffers[i].size(), i, egl_image_format_fourcc_, dmabuf_fds); | |
427 if (egl_image == EGL_NO_IMAGE_KHR) { | |
428 LOGF(ERROR) << "could not create EGLImageKHR," | |
429 << " index=" << i | |
430 << " texture_id=" << buffers[i].texture_ids()[0]; | |
431 for (EGLImageKHR image : egl_images) { | |
432 egl_image_device_->DestroyEGLImage(egl_display_, image); | |
433 } | |
434 NOTIFY_ERROR(PLATFORM_FAILURE); | |
435 return; | |
436 } | |
437 egl_images.push_back(egl_image); | |
438 } | 465 } |
439 | 466 |
440 decoder_thread_.task_runner()->PostTask( | 467 decoder_thread_.task_runner()->PostTask( |
441 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::AssignEGLImages, | 468 FROM_HERE, |
442 base::Unretained(this), buffers, egl_images)); | 469 base::Bind(&V4L2VideoDecodeAccelerator::AssignEGLImage, |
| 470 base::Unretained(this), buffer_index, picture_buffer_id, |
| 471 egl_image, base::Passed(&dmabuf_fds))); |
443 } | 472 } |
444 | 473 |
445 void V4L2VideoDecodeAccelerator::AssignEGLImages( | 474 void V4L2VideoDecodeAccelerator::AssignEGLImage( |
446 const std::vector<media::PictureBuffer>& buffers, | 475 size_t buffer_index, |
447 const std::vector<EGLImageKHR>& egl_images) { | 476 int32_t picture_buffer_id, |
448 DVLOGF(3); | 477 EGLImageKHR egl_image, |
| 478 std::vector<base::ScopedFD> dmabuf_fds) { |
| 479 DVLOGF(3) << "index=" << buffer_index << ", picture_id=" << picture_buffer_id; |
449 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); | 480 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
450 DCHECK_EQ(buffers.size(), egl_images.size()); | |
451 DCHECK(free_output_buffers_.empty()); | |
452 DCHECK(output_buffer_map_.empty()); | |
453 | 481 |
454 output_buffer_map_.resize(buffers.size()); | 482 // It's possible that while waiting for the EGLImages to be allocated and |
455 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { | 483 // assigned, we have already decoded more of the stream and saw another |
456 DCHECK(buffers[i].size() == egl_image_size_); | 484 // resolution change. This is a normal situation, in such a case either there |
457 | 485 // is no output record with this index awaiting an EGLImage to be assigned to |
458 OutputRecord& output_record = output_buffer_map_[i]; | 486 // it, or the record is already updated to use a newer PictureBuffer and is |
459 DCHECK_EQ(output_record.state, kFree); | 487 // awaiting an EGLImage associated with a different picture_buffer_id. If so, |
460 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR); | 488 // just discard this image, we will get the one we are waiting for later. |
461 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); | 489 if (buffer_index >= output_buffer_map_.size() || |
462 DCHECK_EQ(output_record.picture_id, -1); | 490 output_buffer_map_[buffer_index].picture_id != picture_buffer_id) { |
463 DCHECK_EQ(output_record.cleared, false); | 491 DVLOGF(3) << "Picture set already changed, dropping EGLImage"; |
464 DCHECK_LE(1u, buffers[i].texture_ids().size()); | 492 child_task_runner_->PostTask( |
465 | 493 FROM_HERE, base::Bind(base::IgnoreResult(&V4L2Device::DestroyEGLImage), |
466 if (image_processor_device_) { | 494 device_, egl_display_, egl_image)); |
467 std::vector<base::ScopedFD> fds = device_->GetDmabufsForV4L2Buffer( | |
468 i, output_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); | |
469 if (fds.empty()) { | |
470 LOGF(ERROR) << "Failed to get DMABUFs of decoder."; | |
471 NOTIFY_ERROR(PLATFORM_FAILURE); | |
472 return; | |
473 } | |
474 output_record.fds = std::move(fds); | |
475 } | |
476 | |
477 output_record.egl_image = egl_images[i]; | |
478 output_record.picture_id = buffers[i].id(); | |
479 | |
480 free_output_buffers_.push(i); | |
481 DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id; | |
482 } | |
483 | |
484 decoder_state_ = kDecoding; | |
485 Enqueue(); | |
486 if (reset_pending_) { | |
487 FinishReset(); | |
488 return; | 495 return; |
489 } | 496 } |
490 | 497 |
491 ScheduleDecodeBufferTaskIfNeeded(); | 498 OutputRecord& output_record = output_buffer_map_[buffer_index]; |
| 499 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR); |
| 500 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); |
| 501 DCHECK_EQ(output_record.state, kFree); |
| 502 DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(), |
| 503 buffer_index), |
| 504 0); |
| 505 output_record.egl_image = egl_image; |
| 506 free_output_buffers_.push_back(buffer_index); |
| 507 if (decoder_state_ != kChangingResolution) { |
| 508 Enqueue(); |
| 509 ScheduleDecodeBufferTaskIfNeeded(); |
| 510 } |
| 511 } |
| 512 |
| 513 void V4L2VideoDecodeAccelerator::ImportBufferForPicture( |
| 514 int32_t picture_buffer_id, |
| 515 const gfx::GpuMemoryBufferHandle& gpu_memory_buffer_handle) { |
| 516 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; |
| 517 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
| 518 |
| 519 if (output_mode_ != Config::OutputMode::IMPORT) { |
| 520 LOGF(ERROR) << "Cannot import in non-import mode"; |
| 521 NOTIFY_ERROR(INVALID_ARGUMENT); |
| 522 return; |
| 523 } |
| 524 |
| 525 std::vector<base::ScopedFD> dmabuf_fds; |
| 526 int32_t stride = 0; |
| 527 #if defined(USE_OZONE) |
| 528 for (const auto& fd : gpu_memory_buffer_handle.native_pixmap_handle.fds) { |
| 529 DCHECK_NE(fd.fd, -1); |
| 530 dmabuf_fds.push_back(base::ScopedFD(fd.fd)); |
| 531 } |
| 532 stride = gpu_memory_buffer_handle.native_pixmap_handle.planes[0].stride; |
| 533 for (const auto& plane : |
| 534 gpu_memory_buffer_handle.native_pixmap_handle.planes) { |
| 535 DVLOGF(3) << ": offset=" << plane.offset << ", stride=" << plane.stride; |
| 536 } |
| 537 #endif |
| 538 |
| 539 decoder_thread_.task_runner()->PostTask( |
| 540 FROM_HERE, |
| 541 base::Bind(&V4L2VideoDecodeAccelerator::ImportBufferForPictureTask, |
| 542 base::Unretained(this), picture_buffer_id, |
| 543 base::Passed(&dmabuf_fds), stride)); |
| 544 } |
| 545 |
| 546 void V4L2VideoDecodeAccelerator::ImportBufferForPictureTask( |
| 547 int32_t picture_buffer_id, |
| 548 std::vector<base::ScopedFD> dmabuf_fds, |
| 549 int32_t stride) { |
| 550 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id |
| 551 << ", dmabuf_fds.size()=" << dmabuf_fds.size() |
| 552 << ", stride=" << stride; |
| 553 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
| 554 |
| 555 const auto iter = |
| 556 std::find_if(output_buffer_map_.begin(), output_buffer_map_.end(), |
| 557 [picture_buffer_id](const OutputRecord& output_record) { |
| 558 return output_record.picture_id == picture_buffer_id; |
| 559 }); |
| 560 if (iter == output_buffer_map_.end()) { |
| 561 // It's possible that we've already posted a DismissPictureBuffer for this |
| 562 // picture, but it has not yet executed when this ImportBufferForPicture was |
| 563 // posted to us by the client. In that case just ignore this (we've already |
| 564 // dismissed it and accounted for that). |
| 565 DVLOGF(3) << "got picture id=" << picture_buffer_id |
| 566 << " not in use (anymore?)."; |
| 567 return; |
| 568 } |
| 569 |
| 570 if (iter->state != kAtClient) { |
| 571 LOGF(ERROR) << "Cannot import buffer not owned by client"; |
| 572 NOTIFY_ERROR(INVALID_ARGUMENT); |
| 573 return; |
| 574 } |
| 575 |
| 576 int plane_horiz_bits_per_pixel = VideoFrame::PlaneHorizontalBitsPerPixel( |
| 577 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_), 0); |
| 578 if (plane_horiz_bits_per_pixel == 0 || |
| 579 (stride * 8) % plane_horiz_bits_per_pixel != 0) { |
| 580 LOG(ERROR) << "Invalid format " << egl_image_format_fourcc_ << " or stride " |
| 581 << stride; |
| 582 NOTIFY_ERROR(INVALID_ARGUMENT); |
| 583 return; |
| 584 } |
| 585 int adjusted_coded_width = stride * 8 / plane_horiz_bits_per_pixel; |
| 586 |
| 587 if (image_processor_device_ && !image_processor_) { |
| 588 // This is the first buffer import. Create the image processor and change |
| 589 // the decoder state. The client may adjust the coded width. We don't have |
| 590 // the final coded size in AssignPictureBuffers yet. Use the adjusted coded |
| 591 // width to create the image processor. |
| 592 DVLOGF(3) << "Original egl_image_size=" << egl_image_size_.ToString() |
| 593 << ", adjusted coded width=" << adjusted_coded_width; |
| 594 DCHECK_GE(adjusted_coded_width, egl_image_size_.width()); |
| 595 egl_image_size_.set_width(adjusted_coded_width); |
| 596 if (!CreateImageProcessor()) |
| 597 return; |
| 598 DCHECK_EQ(kAwaitingPictureBuffers, decoder_state_); |
| 599 DVLOGF(1) << "Change state to kDecoding"; |
| 600 decoder_state_ = kDecoding; |
| 601 if (reset_pending_) { |
| 602 FinishReset(); |
| 603 } |
| 604 } else { |
| 605 DCHECK_EQ(egl_image_size_.width(), adjusted_coded_width); |
| 606 } |
| 607 |
| 608 size_t index = iter - output_buffer_map_.begin(); |
| 609 DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(), |
| 610 index), |
| 611 0); |
| 612 |
| 613 iter->state = kFree; |
| 614 if (iter->texture_id != 0) { |
| 615 if (iter->egl_image != EGL_NO_IMAGE_KHR) { |
| 616 child_task_runner_->PostTask( |
| 617 FROM_HERE, |
| 618 base::Bind(base::IgnoreResult(&V4L2Device::DestroyEGLImage), device_, |
| 619 egl_display_, iter->egl_image)); |
| 620 } |
| 621 |
| 622 child_task_runner_->PostTask( |
| 623 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::CreateEGLImageFor, |
| 624 weak_this_, index, picture_buffer_id, |
| 625 base::Passed(&dmabuf_fds), iter->texture_id, |
| 626 egl_image_size_, egl_image_format_fourcc_)); |
| 627 } else { |
| 628 // No need for an EGLImage, start using this buffer now. |
| 629 DCHECK_EQ(egl_image_planes_count_, dmabuf_fds.size()); |
| 630 iter->processor_output_fds.swap(dmabuf_fds); |
| 631 free_output_buffers_.push_back(index); |
| 632 if (decoder_state_ != kChangingResolution) { |
| 633 Enqueue(); |
| 634 ScheduleDecodeBufferTaskIfNeeded(); |
| 635 } |
| 636 } |
492 } | 637 } |
493 | 638 |
494 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) { | 639 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) { |
495 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; | 640 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; |
496 // Must be run on child thread, as we'll insert a sync in the EGL context. | 641 // Must be run on child thread, as we'll insert a sync in the EGL context. |
497 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 642 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
498 | 643 |
499 if (!make_context_current_cb_.Run()) { | 644 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref; |
500 LOGF(ERROR) << "could not make context current"; | |
501 NOTIFY_ERROR(PLATFORM_FAILURE); | |
502 return; | |
503 } | |
504 | 645 |
505 EGLSyncKHR egl_sync = EGL_NO_SYNC_KHR; | 646 if (!make_context_current_cb_.is_null()) { |
| 647 if (!make_context_current_cb_.Run()) { |
| 648 LOGF(ERROR) << "could not make context current"; |
| 649 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 650 return; |
| 651 } |
| 652 |
| 653 EGLSyncKHR egl_sync = EGL_NO_SYNC_KHR; |
506 // TODO(posciak): crbug.com/450898. | 654 // TODO(posciak): crbug.com/450898. |
507 #if defined(ARCH_CPU_ARMEL) | 655 #if defined(ARCH_CPU_ARMEL) |
508 egl_sync = eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); | 656 egl_sync = eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); |
509 if (egl_sync == EGL_NO_SYNC_KHR) { | 657 if (egl_sync == EGL_NO_SYNC_KHR) { |
510 LOGF(ERROR) << "eglCreateSyncKHR() failed"; | 658 LOGF(ERROR) << "eglCreateSyncKHR() failed"; |
511 NOTIFY_ERROR(PLATFORM_FAILURE); | 659 NOTIFY_ERROR(PLATFORM_FAILURE); |
512 return; | 660 return; |
513 } | 661 } |
514 #endif | 662 #endif |
515 | 663 |
516 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref( | 664 egl_sync_ref.reset(new EGLSyncKHRRef(egl_display_, egl_sync)); |
517 new EGLSyncKHRRef(egl_display_, egl_sync)); | 665 } |
518 | 666 |
519 decoder_thread_.task_runner()->PostTask( | 667 decoder_thread_.task_runner()->PostTask( |
520 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ReusePictureBufferTask, | 668 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ReusePictureBufferTask, |
521 base::Unretained(this), picture_buffer_id, | 669 base::Unretained(this), picture_buffer_id, |
522 base::Passed(&egl_sync_ref))); | 670 base::Passed(&egl_sync_ref))); |
523 } | 671 } |
524 | 672 |
525 void V4L2VideoDecodeAccelerator::Flush() { | 673 void V4L2VideoDecodeAccelerator::Flush() { |
526 DVLOGF(3); | 674 DVLOGF(3); |
527 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 675 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
(...skipping 663 matching lines...)
1191 if (errno == EAGAIN) { | 1339 if (errno == EAGAIN) { |
1192 // EAGAIN if we're just out of buffers to dequeue. | 1340 // EAGAIN if we're just out of buffers to dequeue. |
1193 break; | 1341 break; |
1194 } | 1342 } |
1195 PLOGF(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; | 1343 PLOGF(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; |
1196 NOTIFY_ERROR(PLATFORM_FAILURE); | 1344 NOTIFY_ERROR(PLATFORM_FAILURE); |
1197 return; | 1345 return; |
1198 } | 1346 } |
1199 OutputRecord& output_record = output_buffer_map_[dqbuf.index]; | 1347 OutputRecord& output_record = output_buffer_map_[dqbuf.index]; |
1200 DCHECK_EQ(output_record.state, kAtDevice); | 1348 DCHECK_EQ(output_record.state, kAtDevice); |
1201 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR); | |
1202 DCHECK_NE(output_record.picture_id, -1); | 1349 DCHECK_NE(output_record.picture_id, -1); |
1203 output_buffer_queued_count_--; | 1350 output_buffer_queued_count_--; |
1204 if (dqbuf.m.planes[0].bytesused == 0) { | 1351 if (dqbuf.m.planes[0].bytesused == 0) { |
1205 // This is an empty output buffer returned as part of a flush. | 1352 // This is an empty output buffer returned as part of a flush. |
1206 output_record.state = kFree; | 1353 output_record.state = kFree; |
1207 free_output_buffers_.push(dqbuf.index); | 1354 free_output_buffers_.push_back(dqbuf.index); |
1208 } else { | 1355 } else { |
1209 int32_t bitstream_buffer_id = dqbuf.timestamp.tv_sec; | 1356 int32_t bitstream_buffer_id = dqbuf.timestamp.tv_sec; |
1210 DCHECK_GE(bitstream_buffer_id, 0); | 1357 DCHECK_GE(bitstream_buffer_id, 0); |
1211 DVLOGF(3) << "Dequeue output buffer: dqbuf index=" << dqbuf.index | 1358 DVLOGF(3) << "Dequeue output buffer: dqbuf index=" << dqbuf.index |
1212 << " bitstream input_id=" << bitstream_buffer_id; | 1359 << " bitstream input_id=" << bitstream_buffer_id; |
1213 if (image_processor_device_) { | 1360 if (image_processor_device_) { |
1214 output_record.state = kAtProcessor; | 1361 if (!ProcessFrame(bitstream_buffer_id, dqbuf.index)) { |
1215 image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id); | 1362 DLOGF(ERROR) << "Processing frame failed"; |
1216 std::vector<int> fds; | 1363 NOTIFY_ERROR(PLATFORM_FAILURE); |
1217 for (auto& fd : output_record.fds) { | 1364 return; |
1218 fds.push_back(fd.get()); | |
1219 } | 1365 } |
1220 scoped_refptr<VideoFrame> frame = VideoFrame::WrapExternalDmabufs( | |
1221 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_), | |
1222 coded_size_, gfx::Rect(visible_size_), visible_size_, fds, | |
1223 base::TimeDelta()); | |
1224 // Unretained is safe because |this| owns image processor and there will | |
1225 // be no callbacks after processor destroys. | |
1226 image_processor_->Process( | |
1227 frame, dqbuf.index, | |
1228 base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed, | |
1229 base::Unretained(this), bitstream_buffer_id)); | |
1230 } else { | 1366 } else { |
1231 output_record.state = kAtClient; | 1367 output_record.state = kAtClient; |
1232 decoder_frames_at_client_++; | 1368 decoder_frames_at_client_++; |
1233 const Picture picture(output_record.picture_id, bitstream_buffer_id, | 1369 const Picture picture(output_record.picture_id, bitstream_buffer_id, |
1234 gfx::Rect(visible_size_), false); | 1370 gfx::Rect(visible_size_), false); |
1235 pending_picture_ready_.push( | 1371 pending_picture_ready_.push( |
1236 PictureRecord(output_record.cleared, picture)); | 1372 PictureRecord(output_record.cleared, picture)); |
1237 SendPictureReady(); | 1373 SendPictureReady(); |
1238 output_record.cleared = true; | 1374 output_record.cleared = true; |
1239 } | 1375 } |
(...skipping 32 matching lines...)
1272 } | 1408 } |
1273 | 1409 |
1274 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() { | 1410 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() { |
1275 DCHECK(!free_output_buffers_.empty()); | 1411 DCHECK(!free_output_buffers_.empty()); |
1276 | 1412 |
1277 // Enqueue an output (VIDEO_CAPTURE) buffer. | 1413 // Enqueue an output (VIDEO_CAPTURE) buffer. |
1278 const int buffer = free_output_buffers_.front(); | 1414 const int buffer = free_output_buffers_.front(); |
1279 DVLOGF(3) << "buffer " << buffer; | 1415 DVLOGF(3) << "buffer " << buffer; |
1280 OutputRecord& output_record = output_buffer_map_[buffer]; | 1416 OutputRecord& output_record = output_buffer_map_[buffer]; |
1281 DCHECK_EQ(output_record.state, kFree); | 1417 DCHECK_EQ(output_record.state, kFree); |
1282 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR); | |
1283 DCHECK_NE(output_record.picture_id, -1); | 1418 DCHECK_NE(output_record.picture_id, -1); |
1284 if (output_record.egl_sync != EGL_NO_SYNC_KHR) { | 1419 if (output_record.egl_sync != EGL_NO_SYNC_KHR) { |
1285 TRACE_EVENT0("Video Decoder", | 1420 TRACE_EVENT0("Video Decoder", |
1286 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR"); | 1421 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR"); |
1287 // If we have to wait for completion, wait. Note that | 1422 // If we have to wait for completion, wait. Note that |
1288 // free_output_buffers_ is a FIFO queue, so we always wait on the | 1423 // free_output_buffers_ is a FIFO queue, so we always wait on the |
1289 // buffer that has been in the queue the longest. | 1424 // buffer that has been in the queue the longest. |
1290 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0, | 1425 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0, |
1291 EGL_FOREVER_KHR) == EGL_FALSE) { | 1426 EGL_FOREVER_KHR) == EGL_FALSE) { |
1292 // This will cause tearing, but is safe otherwise. | 1427 // This will cause tearing, but is safe otherwise. |
(...skipping 10 matching lines...)
1303 std::unique_ptr<struct v4l2_plane[]> qbuf_planes( | 1438 std::unique_ptr<struct v4l2_plane[]> qbuf_planes( |
1304 new v4l2_plane[output_planes_count_]); | 1439 new v4l2_plane[output_planes_count_]); |
1305 memset(&qbuf, 0, sizeof(qbuf)); | 1440 memset(&qbuf, 0, sizeof(qbuf)); |
1306 memset(qbuf_planes.get(), 0, | 1441 memset(qbuf_planes.get(), 0, |
1307 sizeof(struct v4l2_plane) * output_planes_count_); | 1442 sizeof(struct v4l2_plane) * output_planes_count_); |
1308 qbuf.index = buffer; | 1443 qbuf.index = buffer; |
1309 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 1444 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
1310 qbuf.memory = V4L2_MEMORY_MMAP; | 1445 qbuf.memory = V4L2_MEMORY_MMAP; |
1311 qbuf.m.planes = qbuf_planes.get(); | 1446 qbuf.m.planes = qbuf_planes.get(); |
1312 qbuf.length = output_planes_count_; | 1447 qbuf.length = output_planes_count_; |
| 1448 DVLOGF(2) << "qbuf.index=" << qbuf.index; |
1313 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); | 1449 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); |
1314 free_output_buffers_.pop(); | 1450 free_output_buffers_.pop_front(); |
1315 output_record.state = kAtDevice; | 1451 output_record.state = kAtDevice; |
1316 output_buffer_queued_count_++; | 1452 output_buffer_queued_count_++; |
1317 return true; | 1453 return true; |
1318 } | 1454 } |
1319 | 1455 |
1320 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask( | 1456 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask( |
1321 int32_t picture_buffer_id, | 1457 int32_t picture_buffer_id, |
1322 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref) { | 1458 std::unique_ptr<EGLSyncKHRRef> egl_sync_ref) { |
1323 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; | 1459 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id; |
1324 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); | 1460 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
(...skipping 27 matching lines...)
1352 } | 1488 } |
1353 | 1489 |
1354 OutputRecord& output_record = output_buffer_map_[index]; | 1490 OutputRecord& output_record = output_buffer_map_[index]; |
1355 if (output_record.state != kAtClient) { | 1491 if (output_record.state != kAtClient) { |
1356 LOGF(ERROR) << "picture_buffer_id not reusable"; | 1492 LOGF(ERROR) << "picture_buffer_id not reusable"; |
1357 NOTIFY_ERROR(INVALID_ARGUMENT); | 1493 NOTIFY_ERROR(INVALID_ARGUMENT); |
1358 return; | 1494 return; |
1359 } | 1495 } |
1360 | 1496 |
1361 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); | 1497 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); |
1362 output_record.egl_sync = egl_sync_ref->egl_sync; | |
1363 output_record.state = kFree; | 1498 output_record.state = kFree; |
1364 free_output_buffers_.push(index); | 1499 free_output_buffers_.push_back(index); |
1365 decoder_frames_at_client_--; | 1500 decoder_frames_at_client_--; |
1366 // Take ownership of the EGLSync. | 1501 if (egl_sync_ref) { |
1367 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR; | 1502 output_record.egl_sync = egl_sync_ref->egl_sync; |
| 1503 // Take ownership of the EGLSync. |
| 1504 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR; |
| 1505 } |
1368 // We got a buffer back, so enqueue it back. | 1506 // We got a buffer back, so enqueue it back. |
1369 Enqueue(); | 1507 Enqueue(); |
1370 } | 1508 } |
1371 | 1509 |
1372 void V4L2VideoDecodeAccelerator::FlushTask() { | 1510 void V4L2VideoDecodeAccelerator::FlushTask() { |
1373 DVLOGF(3); | 1511 DVLOGF(3); |
1374 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); | 1512 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
1375 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask"); | 1513 TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask"); |
1376 | 1514 |
1377 // Flush outstanding buffers. | 1515 // Flush outstanding buffers. |
(...skipping 251 matching lines...)
1629 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); | 1767 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); |
1630 output_streamon_ = false; | 1768 output_streamon_ = false; |
1631 | 1769 |
1632 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { | 1770 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { |
1633 // After streamoff, the device drops ownership of all buffers, even if we | 1771 // After streamoff, the device drops ownership of all buffers, even if we |
1634 // don't dequeue them explicitly. Some of them may still be owned by the | 1772 // don't dequeue them explicitly. Some of them may still be owned by the |
1635 // client however. Reuse only those that aren't. | 1773 // client however. Reuse only those that aren't. |
1636 OutputRecord& output_record = output_buffer_map_[i]; | 1774 OutputRecord& output_record = output_buffer_map_[i]; |
1637 if (output_record.state == kAtDevice) { | 1775 if (output_record.state == kAtDevice) { |
1638 output_record.state = kFree; | 1776 output_record.state = kFree; |
1639 free_output_buffers_.push(i); | 1777 free_output_buffers_.push_back(i); |
1640 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); | 1778 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); |
1641 } | 1779 } |
1642 } | 1780 } |
1643 output_buffer_queued_count_ = 0; | 1781 output_buffer_queued_count_ = 0; |
1644 return true; | 1782 return true; |
1645 } | 1783 } |
1646 | 1784 |
1647 bool V4L2VideoDecodeAccelerator::StopInputStream() { | 1785 bool V4L2VideoDecodeAccelerator::StopInputStream() { |
1648 DVLOGF(3); | 1786 DVLOGF(3); |
1649 if (!input_streamon_) | 1787 if (!input_streamon_) |
(...skipping 179 matching lines...)
1829 &egl_image_planes_count_)) { | 1967 &egl_image_planes_count_)) { |
1830 LOGF(ERROR) << "Fail to get output size and plane count of processor"; | 1968 LOGF(ERROR) << "Fail to get output size and plane count of processor"; |
1831 return false; | 1969 return false; |
1832 } | 1970 } |
1833 } else { | 1971 } else { |
1834 egl_image_size_ = coded_size_; | 1972 egl_image_size_ = coded_size_; |
1835 egl_image_planes_count_ = output_planes_count_; | 1973 egl_image_planes_count_ = output_planes_count_; |
1836 } | 1974 } |
1837 DVLOGF(3) << "new resolution: " << coded_size_.ToString() | 1975 DVLOGF(3) << "new resolution: " << coded_size_.ToString() |
1838 << ", visible size: " << visible_size_.ToString() | 1976 << ", visible size: " << visible_size_.ToString() |
1839 << ", EGLImage size: " << egl_image_size_.ToString(); | 1977 << ", decoder output planes count: " << output_planes_count_ |
| 1978 << ", EGLImage size: " << egl_image_size_.ToString() |
| 1979 << ", EGLImage plane count: " << egl_image_planes_count_; |
1840 | 1980 |
1841 return CreateOutputBuffers(); | 1981 return CreateOutputBuffers(); |
1842 } | 1982 } |
1843 | 1983 |
1844 gfx::Size V4L2VideoDecodeAccelerator::GetVisibleSize( | 1984 gfx::Size V4L2VideoDecodeAccelerator::GetVisibleSize( |
1845 const gfx::Size& coded_size) { | 1985 const gfx::Size& coded_size) { |
1846 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); | 1986 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
1847 | 1987 |
1848 struct v4l2_crop crop_arg; | 1988 struct v4l2_crop crop_arg; |
1849 memset(&crop_arg, 0, sizeof(crop_arg)); | 1989 memset(&crop_arg, 0, sizeof(crop_arg)); |
(...skipping 145 matching lines...)
1995 LOGF(ERROR) << "Can't find a usable input format from image processor"; | 2135 LOGF(ERROR) << "Can't find a usable input format from image processor"; |
1996 return false; | 2136 return false; |
1997 } | 2137 } |
1998 egl_image_format_fourcc_ = FindImageProcessorOutputFormat(); | 2138 egl_image_format_fourcc_ = FindImageProcessorOutputFormat(); |
1999 if (egl_image_format_fourcc_ == 0) { | 2139 if (egl_image_format_fourcc_ == 0) { |
2000 LOGF(ERROR) << "Can't find a usable output format from image processor"; | 2140 LOGF(ERROR) << "Can't find a usable output format from image processor"; |
2001 return false; | 2141 return false; |
2002 } | 2142 } |
2003 egl_image_device_ = image_processor_device_; | 2143 egl_image_device_ = image_processor_device_; |
2004 } else { | 2144 } else { |
| 2145 if (output_mode_ == Config::OutputMode::IMPORT) { |
| 2146 LOGF(ERROR) << "Import mode without image processor is not implemented " |
| 2147 << "yet."; |
| 2148 return false; |
| 2149 } |
2005 egl_image_format_fourcc_ = output_format_fourcc_; | 2150 egl_image_format_fourcc_ = output_format_fourcc_; |
2006 egl_image_device_ = device_; | 2151 egl_image_device_ = device_; |
2007 } | 2152 } |
2008 DVLOGF(2) << "Output format=" << output_format_fourcc_; | 2153 DVLOGF(2) << "Output format=" << output_format_fourcc_; |
2009 | 2154 |
2010 // Just set the fourcc for output; resolution, etc., will come from the | 2155 // Just set the fourcc for output; resolution, etc., will come from the |
2011 // driver once it extracts it from the stream. | 2156 // driver once it extracts it from the stream. |
2012 memset(&format, 0, sizeof(format)); | 2157 memset(&format, 0, sizeof(format)); |
2013 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 2158 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
2014 format.fmt.pix_mp.pixelformat = output_format_fourcc_; | 2159 format.fmt.pix_mp.pixelformat = output_format_fourcc_; |
(...skipping 56 matching lines...)
2071 bool V4L2VideoDecodeAccelerator::ResetImageProcessor() { | 2216 bool V4L2VideoDecodeAccelerator::ResetImageProcessor() { |
2072 DVLOGF(3); | 2217 DVLOGF(3); |
2073 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); | 2218 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
2074 | 2219 |
2075 if (!image_processor_->Reset()) | 2220 if (!image_processor_->Reset()) |
2076 return false; | 2221 return false; |
2077 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { | 2222 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { |
2078 OutputRecord& output_record = output_buffer_map_[i]; | 2223 OutputRecord& output_record = output_buffer_map_[i]; |
2079 if (output_record.state == kAtProcessor) { | 2224 if (output_record.state == kAtProcessor) { |
2080 output_record.state = kFree; | 2225 output_record.state = kFree; |
2081 free_output_buffers_.push(i); | 2226 free_output_buffers_.push_back(i); |
2082 } | 2227 } |
2083 } | 2228 } |
2084 while (!image_processor_bitstream_buffer_ids_.empty()) | 2229 while (!image_processor_bitstream_buffer_ids_.empty()) |
2085 image_processor_bitstream_buffer_ids_.pop(); | 2230 image_processor_bitstream_buffer_ids_.pop(); |
2086 | 2231 |
2087 return true; | 2232 return true; |
2088 } | 2233 } |
2089 | 2234 |
| 2235 bool V4L2VideoDecodeAccelerator::CreateImageProcessor() { |
| 2236 DVLOGF(3); |
| 2237 DCHECK(!image_processor_); |
| 2238 image_processor_.reset(new V4L2ImageProcessor(image_processor_device_)); |
| 2239 v4l2_memory output_memory_type = |
| 2240 (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP |
| 2241 : V4L2_MEMORY_DMABUF); |
| 2242 // Unretained is safe because |this| owns image processor and there will be |
| 2243 // no callbacks after processor destroys. |
| 2244 if (!image_processor_->Initialize( |
| 2245 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_), |
| 2246 V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_), |
| 2247 V4L2_MEMORY_DMABUF, output_memory_type, visible_size_, coded_size_, |
| 2248 visible_size_, egl_image_size_, output_buffer_map_.size(), |
| 2249 base::Bind(&V4L2VideoDecodeAccelerator::ImageProcessorError, |
| 2250 base::Unretained(this)))) { |
| 2251 LOGF(ERROR) << "Initialize image processor failed"; |
| 2252 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 2253 return false; |
| 2254 } |
| 2255 DVLOGF(3) << "image_processor_->output_allocated_size()=" |
| 2256 << image_processor_->output_allocated_size().ToString(); |
| 2257 DCHECK(image_processor_->output_allocated_size() == egl_image_size_); |
| 2258 if (image_processor_->input_allocated_size() != coded_size_) { |
| 2259 LOGF(ERROR) << "Image processor should be able to take the output coded " |
| 2260 << "size of decoder " << coded_size_.ToString() |
| 2261 << " without adjusting to " |
| 2262 << image_processor_->input_allocated_size().ToString(); |
| 2263 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 2264 return false; |
| 2265 } |
| 2266 return true; |
| 2267 } |
| 2268 |
| 2269 bool V4L2VideoDecodeAccelerator::ProcessFrame(int32_t bitstream_buffer_id, |
| 2270 int output_buffer_index) { |
| 2271 DVLOGF(3); |
| 2272 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
| 2273 |
| 2274 OutputRecord& output_record = output_buffer_map_[output_buffer_index]; |
| 2275 DCHECK_EQ(output_record.state, kAtDevice); |
| 2276 output_record.state = kAtProcessor; |
| 2277 image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id); |
| 2278 std::vector<int> processor_input_fds; |
| 2279 for (auto& fd : output_record.processor_input_fds) { |
| 2280 processor_input_fds.push_back(fd.get()); |
| 2281 } |
| 2282 scoped_refptr<VideoFrame> input_frame = VideoFrame::WrapExternalDmabufs( |
| 2283 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_), |
| 2284 coded_size_, gfx::Rect(visible_size_), visible_size_, processor_input_fds, |
| 2285 base::TimeDelta()); |
| 2286 |
| 2287 std::vector<base::ScopedFD> processor_output_fds; |
| 2288 if (output_mode_ == Config::OutputMode::IMPORT) { |
| 2289 for (auto& fd : output_record.processor_output_fds) { |
| 2290 processor_output_fds.push_back( |
| 2291 base::ScopedFD(HANDLE_EINTR(dup(fd.get())))); |
| 2292 if (!processor_output_fds.back().is_valid()) { |
| 2293 PLOGF(ERROR) << "Failed duplicating a dmabuf fd"; |
| 2294 return false; |
| 2295 } |
| 2296 } |
| 2297 } |
| 2298 // Unretained is safe because |this| owns image processor and there will |
| 2299 // be no callbacks after processor destroys. |
| 2300 image_processor_->Process( |
| 2301 input_frame, output_buffer_index, std::move(processor_output_fds), |
| 2302 base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed, |
| 2303 base::Unretained(this), bitstream_buffer_id)); |
| 2304 return true; |
| 2305 } |
| 2306 |
2090 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() { | 2307 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() { |
2091 DVLOGF(3); | 2308 DVLOGF(3); |
2092 DCHECK(decoder_state_ == kInitialized || | 2309 DCHECK(decoder_state_ == kInitialized || |
2093 decoder_state_ == kChangingResolution); | 2310 decoder_state_ == kChangingResolution); |
2094 DCHECK(!output_streamon_); | 2311 DCHECK(!output_streamon_); |
2095 DCHECK(output_buffer_map_.empty()); | 2312 DCHECK(output_buffer_map_.empty()); |
2096 | 2313 |
2097 // Number of output buffers we need. | 2314 // Number of output buffers we need. |
2098 struct v4l2_control ctrl; | 2315 struct v4l2_control ctrl; |
2099 memset(&ctrl, 0, sizeof(ctrl)); | 2316 memset(&ctrl, 0, sizeof(ctrl)); |
2100 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE; | 2317 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE; |
2101 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl); | 2318 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl); |
2102 output_dpb_size_ = ctrl.value; | 2319 output_dpb_size_ = ctrl.value; |
2103 | 2320 |
2104 // Output format setup in Initialize(). | 2321 // Output format setup in Initialize(). |
2105 | 2322 |
2106 const uint32_t buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount; | 2323 const uint32_t buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount; |
2107 DVLOGF(3) << "buffer_count=" << buffer_count | 2324 DVLOGF(3) << "buffer_count=" << buffer_count |
2108 << ", coded_size=" << egl_image_size_.ToString(); | 2325 << ", coded_size=" << egl_image_size_.ToString(); |
2109 | 2326 |
| 2327 // With ALLOCATE mode the client can sample it as RGB and doesn't need to |
| 2328 // know the precise format. |
| 2329 VideoPixelFormat pixel_format = |
| 2330 (output_mode_ == Config::OutputMode::IMPORT) |
| 2331 ? V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_) |
| 2332 : PIXEL_FORMAT_UNKNOWN; |
| 2333 |
2110 child_task_runner_->PostTask( | 2334 child_task_runner_->PostTask( |
2111 FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_, | 2335 FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_, |
2112 buffer_count, PIXEL_FORMAT_UNKNOWN, 1, | 2336 buffer_count, pixel_format, 1, egl_image_size_, |
2113 egl_image_size_, device_->GetTextureTarget())); | 2337 device_->GetTextureTarget())); |
2114 | 2338 |
2115 // Go into kAwaitingPictureBuffers to prevent us from doing any more decoding | 2339 // Go into kAwaitingPictureBuffers to prevent us from doing any more decoding |
2116 // or event handling while we are waiting for AssignPictureBuffers(). Not | 2340 // or event handling while we are waiting for AssignPictureBuffers(). Not |
2117 // having Pictures available would not have prevented us from making decoding | 2341 // having Pictures available would not have prevented us from making decoding |
2118 // progress entirely e.g. in the case of H.264 where we could further decode | 2342 // progress entirely e.g. in the case of H.264 where we could further decode |
2119 // non-slice NALUs and could even get another resolution change before we were | 2343 // non-slice NALUs and could even get another resolution change before we were |
2120 // done with this one. After we get the buffers, we'll go back into kIdle and | 2344 // done with this one. After we get the buffers, we'll go back into kIdle and |
2121 // kick off further event processing, and eventually go back into kDecoding | 2345 // kick off further event processing, and eventually go back into kDecoding |
2122 // once no more events are pending (if any). | 2346 // once no more events are pending (if any). |
2123 decoder_state_ = kAwaitingPictureBuffers; | 2347 decoder_state_ = kAwaitingPictureBuffers; |
(...skipping 53 matching lines...)
2177 output_record.picture_id)); | 2401 output_record.picture_id)); |
2178 } | 2402 } |
2179 | 2403 |
2180 struct v4l2_requestbuffers reqbufs; | 2404 struct v4l2_requestbuffers reqbufs; |
2181 memset(&reqbufs, 0, sizeof(reqbufs)); | 2405 memset(&reqbufs, 0, sizeof(reqbufs)); |
2182 reqbufs.count = 0; | 2406 reqbufs.count = 0; |
2183 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 2407 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
2184 reqbufs.memory = V4L2_MEMORY_MMAP; | 2408 reqbufs.memory = V4L2_MEMORY_MMAP; |
2185 if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) { | 2409 if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) { |
2186 PLOGF(ERROR) << "ioctl() failed: VIDIOC_REQBUFS"; | 2410 PLOGF(ERROR) << "ioctl() failed: VIDIOC_REQBUFS"; |
| 2411 NOTIFY_ERROR(PLATFORM_FAILURE); |
2187 success = false; | 2412 success = false; |
2188 } | 2413 } |
2189 | 2414 |
2190 output_buffer_map_.clear(); | 2415 output_buffer_map_.clear(); |
2191 while (!free_output_buffers_.empty()) | 2416 while (!free_output_buffers_.empty()) |
2192 free_output_buffers_.pop(); | 2417 free_output_buffers_.pop_front(); |
2193 output_buffer_queued_count_ = 0; | 2418 output_buffer_queued_count_ = 0; |
2194 // The client may still hold some buffers. The texture holds a reference to | 2419 // The client may still hold some buffers. The texture holds a reference to |
2195 // the buffer. It is OK to free the buffer and destroy EGLImage here. | 2420 // the buffer. It is OK to free the buffer and destroy EGLImage here. |
2196 decoder_frames_at_client_ = 0; | 2421 decoder_frames_at_client_ = 0; |
2197 | 2422 |
2198 return success; | 2423 return success; |
2199 } | 2424 } |
2200 | 2425 |
2201 void V4L2VideoDecodeAccelerator::SendPictureReady() { | 2426 void V4L2VideoDecodeAccelerator::SendPictureReady() { |
2202 DVLOGF(3); | 2427 DVLOGF(3); |
(...skipping 51 matching lines...)
2254 << ", bitstream_buffer_id=" << bitstream_buffer_id; | 2479 << ", bitstream_buffer_id=" << bitstream_buffer_id; |
2255 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); | 2480 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
2256 DCHECK(!image_processor_bitstream_buffer_ids_.empty()); | 2481 DCHECK(!image_processor_bitstream_buffer_ids_.empty()); |
2257 DCHECK(image_processor_bitstream_buffer_ids_.front() == bitstream_buffer_id); | 2482 DCHECK(image_processor_bitstream_buffer_ids_.front() == bitstream_buffer_id); |
2258 DCHECK_GE(output_buffer_index, 0); | 2483 DCHECK_GE(output_buffer_index, 0); |
2259 DCHECK_LT(output_buffer_index, static_cast<int>(output_buffer_map_.size())); | 2484 DCHECK_LT(output_buffer_index, static_cast<int>(output_buffer_map_.size())); |
2260 | 2485 |
2261 OutputRecord& output_record = output_buffer_map_[output_buffer_index]; | 2486 OutputRecord& output_record = output_buffer_map_[output_buffer_index]; |
2262 DVLOGF(3) << "picture_id=" << output_record.picture_id; | 2487 DVLOGF(3) << "picture_id=" << output_record.picture_id; |
2263 DCHECK_EQ(output_record.state, kAtProcessor); | 2488 DCHECK_EQ(output_record.state, kAtProcessor); |
2264 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR); | |
2265 DCHECK_NE(output_record.picture_id, -1); | 2489 DCHECK_NE(output_record.picture_id, -1); |
2266 | 2490 |
2267 // Send the processed frame to render. | 2491 // Send the processed frame to render. |
2268 output_record.state = kAtClient; | 2492 output_record.state = kAtClient; |
2269 decoder_frames_at_client_++; | 2493 decoder_frames_at_client_++; |
2270 image_processor_bitstream_buffer_ids_.pop(); | 2494 image_processor_bitstream_buffer_ids_.pop(); |
2271 const Picture picture(output_record.picture_id, bitstream_buffer_id, | 2495 const Picture picture(output_record.picture_id, bitstream_buffer_id, |
2272 gfx::Rect(visible_size_), false); | 2496 gfx::Rect(visible_size_), false); |
2273 pending_picture_ready_.push(PictureRecord(output_record.cleared, picture)); | 2497 pending_picture_ready_.push(PictureRecord(output_record.cleared, picture)); |
2274 SendPictureReady(); | 2498 SendPictureReady(); |
2275 output_record.cleared = true; | 2499 output_record.cleared = true; |
2276 // Flush or resolution change may be waiting image processor to finish. | 2500 // Flush or resolution change may be waiting image processor to finish. |
2277 if (image_processor_bitstream_buffer_ids_.empty()) { | 2501 if (image_processor_bitstream_buffer_ids_.empty()) { |
2278 NotifyFlushDoneIfNeeded(); | 2502 NotifyFlushDoneIfNeeded(); |
2279 if (decoder_state_ == kChangingResolution) | 2503 if (decoder_state_ == kChangingResolution) |
2280 StartResolutionChange(); | 2504 StartResolutionChange(); |
2281 } | 2505 } |
2282 } | 2506 } |
2283 | 2507 |
2284 void V4L2VideoDecodeAccelerator::ImageProcessorError() { | 2508 void V4L2VideoDecodeAccelerator::ImageProcessorError() { |
2285 LOGF(ERROR) << "Image processor error"; | 2509 LOGF(ERROR) << "Image processor error"; |
2286 NOTIFY_ERROR(PLATFORM_FAILURE); | 2510 NOTIFY_ERROR(PLATFORM_FAILURE); |
2287 } | 2511 } |
2288 | 2512 |
2289 } // namespace media | 2513 } // namespace media |