Chromium Code Reviews

Side by Side Diff: media/gpu/vt_video_decode_accelerator_mac.cc

Issue 1882373004: Migrate content/common/gpu/media code to media/gpu (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fix prefix to content references in content_gpu.gypi Created 4 years, 8 months ago
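The change to this file is mechanical: the include path moves from content/common/gpu/media/ to media/gpu/, the dynamic-library stub namespace is renamed from content_common_gpu_media to media_gpu, the enclosing namespace changes from content to media, and most of the remaining churn is line re-wrapping. A minimal before/after sketch of that rename pattern, using only names that appear in the hunks below:

    // Old tree (content/common/gpu/media):
    #include "content/common/gpu/media/vt_video_decode_accelerator_mac.h"
    using content_common_gpu_media::kModuleVt;
    namespace content { /* ... */ }  // namespace content

    // New tree (media/gpu):
    #include "media/gpu/vt_video_decode_accelerator_mac.h"
    using media_gpu::kModuleVt;
    namespace media { /* ... */ }  // namespace media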
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/common/gpu/media/vt_video_decode_accelerator_mac.h" 5 #include "media/gpu/vt_video_decode_accelerator_mac.h"
6 6
7 #include <CoreVideo/CoreVideo.h> 7 #include <CoreVideo/CoreVideo.h>
8 #include <OpenGL/CGLIOSurface.h> 8 #include <OpenGL/CGLIOSurface.h>
9 #include <OpenGL/gl.h> 9 #include <OpenGL/gl.h>
10 #include <stddef.h> 10 #include <stddef.h>
11 11
12 #include <algorithm> 12 #include <algorithm>
13 13
14 #include "base/bind.h" 14 #include "base/bind.h"
15 #include "base/logging.h" 15 #include "base/logging.h"
16 #include "base/mac/mac_logging.h" 16 #include "base/mac/mac_logging.h"
17 #include "base/macros.h" 17 #include "base/macros.h"
18 #include "base/memory/ptr_util.h" 18 #include "base/memory/ptr_util.h"
19 #include "base/metrics/histogram_macros.h" 19 #include "base/metrics/histogram_macros.h"
20 #include "base/sys_byteorder.h" 20 #include "base/sys_byteorder.h"
21 #include "base/sys_info.h" 21 #include "base/sys_info.h"
22 #include "base/thread_task_runner_handle.h" 22 #include "base/thread_task_runner_handle.h"
23 #include "base/version.h" 23 #include "base/version.h"
24 #include "media/base/limits.h" 24 #include "media/base/limits.h"
25 #include "ui/gl/gl_context.h" 25 #include "ui/gl/gl_context.h"
26 #include "ui/gl/gl_image_io_surface.h" 26 #include "ui/gl/gl_image_io_surface.h"
27 #include "ui/gl/gl_implementation.h" 27 #include "ui/gl/gl_implementation.h"
28 #include "ui/gl/scoped_binders.h" 28 #include "ui/gl/scoped_binders.h"
29 29
30 using content_common_gpu_media::kModuleVt; 30 using media_gpu::kModuleVt;
31 using content_common_gpu_media::InitializeStubs; 31 using media_gpu::InitializeStubs;
32 using content_common_gpu_media::IsVtInitialized; 32 using media_gpu::IsVtInitialized;
33 using content_common_gpu_media::StubPathMap; 33 using media_gpu::StubPathMap;
34 34
35 #define NOTIFY_STATUS(name, status, session_failure) \ 35 #define NOTIFY_STATUS(name, status, session_failure) \
36 do { \ 36 do { \
37 OSSTATUS_DLOG(ERROR, status) << name; \ 37 OSSTATUS_DLOG(ERROR, status) << name; \
38 NotifyError(PLATFORM_FAILURE, session_failure); \ 38 NotifyError(PLATFORM_FAILURE, session_failure); \
39 } while (0) 39 } while (0)
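NOTIFY_STATUS() pairs an OSSTATUS_DLOG of the failed call with a NotifyError() that carries both the VDA error code and a session-failure bucket for UMA. A short usage sketch, mirroring how FinishDelayedFrames() below invokes it (|session_| and the SFT_* constants come from the accompanying header):

    // Sketch only: report a failed VideoToolbox call through NOTIFY_STATUS().
    OSStatus status = VTDecompressionSessionWaitForAsynchronousFrames(session_);
    if (status) {
      // Logs the OSStatus and raises PLATFORM_FAILURE, tagged for UMA as a
      // platform session failure.
      NOTIFY_STATUS("VTDecompressionSessionWaitForAsynchronousFrames()", status,
                    SFT_PLATFORM_ERROR);
      return false;
    }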
40 40
41 namespace content { 41 namespace media {
42 42
43 // Only H.264 with 4:2:0 chroma sampling is supported. 43 // Only H.264 with 4:2:0 chroma sampling is supported.
44 static const media::VideoCodecProfile kSupportedProfiles[] = { 44 static const media::VideoCodecProfile kSupportedProfiles[] = {
45 media::H264PROFILE_BASELINE, 45 media::H264PROFILE_BASELINE, media::H264PROFILE_MAIN,
46 media::H264PROFILE_MAIN, 46 media::H264PROFILE_EXTENDED, media::H264PROFILE_HIGH,
47 media::H264PROFILE_EXTENDED, 47 // TODO(hubbe): Try to re-enable this again somehow. Currently it seems
48 media::H264PROFILE_HIGH, 48 // that some codecs fail to check the profile during initialization and
49 // TODO(hubbe): Try to re-enable this again somehow. Currently it seems 49 // then fail on the first frame decode, which currently results in a
50 // that some codecs fail to check the profile during initialization and 50 // pipeline failure.
51 // then fail on the first frame decode, which currently results in a 51 // media::H264PROFILE_HIGH10PROFILE,
52 // pipeline failure. 52 media::H264PROFILE_SCALABLEBASELINE, media::H264PROFILE_SCALABLEHIGH,
53 // media::H264PROFILE_HIGH10PROFILE, 53 media::H264PROFILE_STEREOHIGH, media::H264PROFILE_MULTIVIEWHIGH,
54 media::H264PROFILE_SCALABLEBASELINE,
55 media::H264PROFILE_SCALABLEHIGH,
56 media::H264PROFILE_STEREOHIGH,
57 media::H264PROFILE_MULTIVIEWHIGH,
58 }; 54 };
59 55
60 // Size to use for NALU length headers in AVC format (can be 1, 2, or 4). 56 // Size to use for NALU length headers in AVC format (can be 1, 2, or 4).
61 static const int kNALUHeaderLength = 4; 57 static const int kNALUHeaderLength = 4;
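With kNALUHeaderLength == 4, each NALU written into the sample is preceded by its size as a 4-byte big-endian integer (AVC length-prefixed format rather than Annex B start codes). A hedged sketch of that conversion, matching the CMBlockBufferReplaceDataBytes() loop further down in this file (|data| is the destination CMBlockBuffer and |offset| a running byte offset):

    // Sketch: write one length-prefixed NALU into the CMBlockBuffer |data|.
    uint32_t header = base::HostToNet32(static_cast<uint32_t>(nalu.size));
    status = CMBlockBufferReplaceDataBytes(&header, data, offset, kNALUHeaderLength);
    offset += kNALUHeaderLength;
    status = CMBlockBufferReplaceDataBytes(nalu.data, data, offset, nalu.size);
    offset += nalu.size;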
62 58
63 // We request 5 picture buffers from the client, each of which has a texture ID 59 // We request 5 picture buffers from the client, each of which has a texture ID
64 // that we can bind decoded frames to. We need enough to satisfy preroll, and 60 // that we can bind decoded frames to. We need enough to satisfy preroll, and
65 // enough to avoid unnecessary stalling, but no more than that. The resource 61 // enough to avoid unnecessary stalling, but no more than that. The resource
66 // requirements are low, as we don't need the textures to be backed by storage. 62 // requirements are low, as we don't need the textures to be backed by storage.
67 static const int kNumPictureBuffers = media::limits::kMaxVideoFrames + 1; 63 static const int kNumPictureBuffers = media::limits::kMaxVideoFrames + 1;
68 64
69 // Maximum number of frames to queue for reordering before we stop asking for 65 // Maximum number of frames to queue for reordering before we stop asking for
70 // more. (NotifyEndOfBitstreamBuffer() is called when frames are moved into the 66 // more. (NotifyEndOfBitstreamBuffer() is called when frames are moved into the
71 // reorder queue.) 67 // reorder queue.)
72 static const int kMaxReorderQueueSize = 16; 68 static const int kMaxReorderQueueSize = 16;
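This cap interacts with the per-stream reorder window parsed from the SPS: each frame's window is clamped so it always fits inside the queue. As a worked example of the clamp applied in DecodeTask() below, with kMaxReorderQueueSize == 16 an SPS advertising max_num_reorder_frames of 20 is clamped to 15, while a typical value of 2 is kept as-is.

    // Sketch of the clamp as used in DecodeTask() below.
    frame->reorder_window =
        std::min(sps->max_num_reorder_frames, kMaxReorderQueueSize - 1);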
73 69
74 // Build an |image_config| dictionary for VideoToolbox initialization. 70 // Build an |image_config| dictionary for VideoToolbox initialization.
75 static base::ScopedCFTypeRef<CFMutableDictionaryRef> 71 static base::ScopedCFTypeRef<CFMutableDictionaryRef> BuildImageConfig(
76 BuildImageConfig(CMVideoDimensions coded_dimensions) { 72 CMVideoDimensions coded_dimensions) {
77 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config; 73 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config;
78 74
79 // Note that 4:2:0 textures cannot be used directly as RGBA in OpenGL, but are 75 // Note that 4:2:0 textures cannot be used directly as RGBA in OpenGL, but are
80 // lower power than 4:2:2 when composited directly by CoreAnimation. 76 // lower power than 4:2:2 when composited directly by CoreAnimation.
81 int32_t pixel_format = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; 77 int32_t pixel_format = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
82 #define CFINT(i) CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &i) 78 #define CFINT(i) CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &i)
83 base::ScopedCFTypeRef<CFNumberRef> cf_pixel_format(CFINT(pixel_format)); 79 base::ScopedCFTypeRef<CFNumberRef> cf_pixel_format(CFINT(pixel_format));
84 base::ScopedCFTypeRef<CFNumberRef> cf_width(CFINT(coded_dimensions.width)); 80 base::ScopedCFTypeRef<CFNumberRef> cf_width(CFINT(coded_dimensions.width));
85 base::ScopedCFTypeRef<CFNumberRef> cf_height(CFINT(coded_dimensions.height)); 81 base::ScopedCFTypeRef<CFNumberRef> cf_height(CFINT(coded_dimensions.height));
86 #undef CFINT 82 #undef CFINT
87 if (!cf_pixel_format.get() || !cf_width.get() || !cf_height.get()) 83 if (!cf_pixel_format.get() || !cf_width.get() || !cf_height.get())
88 return image_config; 84 return image_config;
89 85
90 image_config.reset( 86 image_config.reset(CFDictionaryCreateMutable(
91 CFDictionaryCreateMutable( 87 kCFAllocatorDefault,
92 kCFAllocatorDefault, 88 3, // capacity
93 3, // capacity 89 &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
94 &kCFTypeDictionaryKeyCallBacks,
95 &kCFTypeDictionaryValueCallBacks));
96 if (!image_config.get()) 90 if (!image_config.get())
97 return image_config; 91 return image_config;
98 92
99 CFDictionarySetValue(image_config, kCVPixelBufferPixelFormatTypeKey, 93 CFDictionarySetValue(image_config, kCVPixelBufferPixelFormatTypeKey,
100 cf_pixel_format); 94 cf_pixel_format);
101 CFDictionarySetValue(image_config, kCVPixelBufferWidthKey, cf_width); 95 CFDictionarySetValue(image_config, kCVPixelBufferWidthKey, cf_width);
102 CFDictionarySetValue(image_config, kCVPixelBufferHeightKey, cf_height); 96 CFDictionarySetValue(image_config, kCVPixelBufferHeightKey, cf_height);
103 97
104 return image_config; 98 return image_config;
105 } 99 }
106 100
107 // Create a VTDecompressionSession using the provided |pps| and |sps|. If 101 // Create a VTDecompressionSession using the provided |pps| and |sps|. If
108 // |require_hardware| is true, the session must use real hardware decoding 102 // |require_hardware| is true, the session must use real hardware decoding
109 // (as opposed to software decoding inside of VideoToolbox) to be considered 103 // (as opposed to software decoding inside of VideoToolbox) to be considered
110 // successful. 104 // successful.
111 // 105 //
112 // TODO(sandersd): Merge with ConfigureDecoder(), as the code is very similar. 106 // TODO(sandersd): Merge with ConfigureDecoder(), as the code is very similar.
113 static bool CreateVideoToolboxSession(const uint8_t* sps, size_t sps_size, 107 static bool CreateVideoToolboxSession(const uint8_t* sps,
114 const uint8_t* pps, size_t pps_size, 108 size_t sps_size,
109 const uint8_t* pps,
110 size_t pps_size,
115 bool require_hardware) { 111 bool require_hardware) {
116 const uint8_t* data_ptrs[] = {sps, pps}; 112 const uint8_t* data_ptrs[] = {sps, pps};
117 const size_t data_sizes[] = {sps_size, pps_size}; 113 const size_t data_sizes[] = {sps_size, pps_size};
118 114
119 base::ScopedCFTypeRef<CMFormatDescriptionRef> format; 115 base::ScopedCFTypeRef<CMFormatDescriptionRef> format;
120 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets( 116 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
121 kCFAllocatorDefault, 117 kCFAllocatorDefault,
122 2, // parameter_set_count 118 2, // parameter_set_count
123 data_ptrs, // &parameter_set_pointers 119 data_ptrs, // &parameter_set_pointers
124 data_sizes, // &parameter_set_sizes 120 data_sizes, // &parameter_set_sizes
125 kNALUHeaderLength, // nal_unit_header_length 121 kNALUHeaderLength, // nal_unit_header_length
126 format.InitializeInto()); 122 format.InitializeInto());
127 if (status) { 123 if (status) {
128 OSSTATUS_DLOG(WARNING, status) 124 OSSTATUS_DLOG(WARNING, status)
129 << "Failed to create CMVideoFormatDescription."; 125 << "Failed to create CMVideoFormatDescription.";
130 return false; 126 return false;
131 } 127 }
132 128
133 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config( 129 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config(
134 CFDictionaryCreateMutable( 130 CFDictionaryCreateMutable(kCFAllocatorDefault,
135 kCFAllocatorDefault, 131 1, // capacity
136 1, // capacity 132 &kCFTypeDictionaryKeyCallBacks,
137 &kCFTypeDictionaryKeyCallBacks, 133 &kCFTypeDictionaryValueCallBacks));
138 &kCFTypeDictionaryValueCallBacks));
139 if (!decoder_config.get()) 134 if (!decoder_config.get())
140 return false; 135 return false;
141 136
142 if (require_hardware) { 137 if (require_hardware) {
143 CFDictionarySetValue( 138 CFDictionarySetValue(
144 decoder_config, 139 decoder_config,
145 // kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder 140 // kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder
146 CFSTR("RequireHardwareAcceleratedVideoDecoder"), 141 CFSTR("RequireHardwareAcceleratedVideoDecoder"), kCFBooleanTrue);
147 kCFBooleanTrue);
148 } 142 }
149 143
150 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config( 144 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config(
151 BuildImageConfig(CMVideoFormatDescriptionGetDimensions(format))); 145 BuildImageConfig(CMVideoFormatDescriptionGetDimensions(format)));
152 if (!image_config.get()) 146 if (!image_config.get())
153 return false; 147 return false;
154 148
155 VTDecompressionOutputCallbackRecord callback = {0}; 149 VTDecompressionOutputCallbackRecord callback = {0};
156 150
157 base::ScopedCFTypeRef<VTDecompressionSessionRef> session; 151 base::ScopedCFTypeRef<VTDecompressionSessionRef> session;
158 status = VTDecompressionSessionCreate( 152 status = VTDecompressionSessionCreate(
159 kCFAllocatorDefault, 153 kCFAllocatorDefault,
160 format, // video_format_description 154 format, // video_format_description
161 decoder_config, // video_decoder_specification 155 decoder_config, // video_decoder_specification
162 image_config, // destination_image_buffer_attributes 156 image_config, // destination_image_buffer_attributes
163 &callback, // output_callback 157 &callback, // output_callback
164 session.InitializeInto()); 158 session.InitializeInto());
165 if (status) { 159 if (status) {
166 OSSTATUS_DLOG(WARNING, status) << "Failed to create VTDecompressionSession"; 160 OSSTATUS_DLOG(WARNING, status) << "Failed to create VTDecompressionSession";
167 return false; 161 return false;
168 } 162 }
169 163
170 return true; 164 return true;
171 } 165 }
172 166
173 // The purpose of this function is to preload the generic and hardware-specific 167 // The purpose of this function is to preload the generic and hardware-specific
(...skipping 55 matching lines...)
229 223
230 if (!attempted) { 224 if (!attempted) {
231 attempted = true; 225 attempted = true;
232 succeeded = InitializeVideoToolboxInternal(); 226 succeeded = InitializeVideoToolboxInternal();
233 } 227 }
234 228
235 return succeeded; 229 return succeeded;
236 } 230 }
237 231
238 // Route decoded frame callbacks back into the VTVideoDecodeAccelerator. 232 // Route decoded frame callbacks back into the VTVideoDecodeAccelerator.
239 static void OutputThunk( 233 static void OutputThunk(void* decompression_output_refcon,
240 void* decompression_output_refcon, 234 void* source_frame_refcon,
241 void* source_frame_refcon, 235 OSStatus status,
242 OSStatus status, 236 VTDecodeInfoFlags info_flags,
243 VTDecodeInfoFlags info_flags, 237 CVImageBufferRef image_buffer,
244 CVImageBufferRef image_buffer, 238 CMTime presentation_time_stamp,
245 CMTime presentation_time_stamp, 239 CMTime presentation_duration) {
246 CMTime presentation_duration) {
247 VTVideoDecodeAccelerator* vda = 240 VTVideoDecodeAccelerator* vda =
248 reinterpret_cast<VTVideoDecodeAccelerator*>(decompression_output_refcon); 241 reinterpret_cast<VTVideoDecodeAccelerator*>(decompression_output_refcon);
249 vda->Output(source_frame_refcon, status, image_buffer); 242 vda->Output(source_frame_refcon, status, image_buffer);
250 } 243 }
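OutputThunk() is the C callback handed to VideoToolbox; the refcon carries the VTVideoDecodeAccelerator pointer back out of the decoder. A hedged sketch of the wiring (the callback record setup itself is outside the hunks shown here, so the member name |callback_| is assumed from its use in ConfigureDecoder()):

    // Sketch: point the VideoToolbox output callback record at OutputThunk(),
    // with |this| as the refcon that OutputThunk() casts back to the VDA.
    VTDecompressionOutputCallbackRecord callback_;
    callback_.decompressionOutputCallback = OutputThunk;
    callback_.decompressionOutputRefCon = this;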
251 244
252 VTVideoDecodeAccelerator::Task::Task(TaskType type) : type(type) { 245 VTVideoDecodeAccelerator::Task::Task(TaskType type) : type(type) {}
253 }
254 246
255 VTVideoDecodeAccelerator::Task::Task(const Task& other) = default; 247 VTVideoDecodeAccelerator::Task::Task(const Task& other) = default;
256 248
257 VTVideoDecodeAccelerator::Task::~Task() { 249 VTVideoDecodeAccelerator::Task::~Task() {}
258 }
259 250
260 VTVideoDecodeAccelerator::Frame::Frame(int32_t bitstream_id) 251 VTVideoDecodeAccelerator::Frame::Frame(int32_t bitstream_id)
261 : bitstream_id(bitstream_id), 252 : bitstream_id(bitstream_id),
262 pic_order_cnt(0), 253 pic_order_cnt(0),
263 is_idr(false), 254 is_idr(false),
264 reorder_window(0) { 255 reorder_window(0) {}
265 }
266 256
267 VTVideoDecodeAccelerator::Frame::~Frame() { 257 VTVideoDecodeAccelerator::Frame::~Frame() {}
268 }
269 258
270 VTVideoDecodeAccelerator::PictureInfo::PictureInfo(uint32_t client_texture_id, 259 VTVideoDecodeAccelerator::PictureInfo::PictureInfo(uint32_t client_texture_id,
271 uint32_t service_texture_id) 260 uint32_t service_texture_id)
272 : client_texture_id(client_texture_id), 261 : client_texture_id(client_texture_id),
273 service_texture_id(service_texture_id) {} 262 service_texture_id(service_texture_id) {}
274 263
275 VTVideoDecodeAccelerator::PictureInfo::~PictureInfo() { 264 VTVideoDecodeAccelerator::PictureInfo::~PictureInfo() {
276 if (gl_image) 265 if (gl_image)
277 gl_image->Destroy(false); 266 gl_image->Destroy(false);
278 } 267 }
(...skipping 64 matching lines...)
343 } 332 }
344 if (!profile_supported) 333 if (!profile_supported)
345 return false; 334 return false;
346 335
347 // Spawn a thread to handle parsing and calling VideoToolbox. 336 // Spawn a thread to handle parsing and calling VideoToolbox.
348 if (!decoder_thread_.Start()) 337 if (!decoder_thread_.Start())
349 return false; 338 return false;
350 339
351 // Count the session as successfully initialized. 340 // Count the session as successfully initialized.
352 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason", 341 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason",
353 SFT_SUCCESSFULLY_INITIALIZED, 342 SFT_SUCCESSFULLY_INITIALIZED, SFT_MAX + 1);
354 SFT_MAX + 1);
355 return true; 343 return true;
356 } 344 }
357 345
358 bool VTVideoDecodeAccelerator::FinishDelayedFrames() { 346 bool VTVideoDecodeAccelerator::FinishDelayedFrames() {
359 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 347 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
360 if (session_) { 348 if (session_) {
361 OSStatus status = VTDecompressionSessionWaitForAsynchronousFrames(session_); 349 OSStatus status = VTDecompressionSessionWaitForAsynchronousFrames(session_);
362 if (status) { 350 if (status) {
363 NOTIFY_STATUS("VTDecompressionSessionWaitForAsynchronousFrames()", 351 NOTIFY_STATUS("VTDecompressionSessionWaitForAsynchronousFrames()", status,
364 status, SFT_PLATFORM_ERROR); 352 SFT_PLATFORM_ERROR);
365 return false; 353 return false;
366 } 354 }
367 } 355 }
368 return true; 356 return true;
369 } 357 }
370 358
371 bool VTVideoDecodeAccelerator::ConfigureDecoder() { 359 bool VTVideoDecodeAccelerator::ConfigureDecoder() {
372 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 360 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
373 DCHECK(!last_sps_.empty()); 361 DCHECK(!last_sps_.empty());
374 DCHECK(!last_pps_.empty()); 362 DCHECK(!last_pps_.empty());
375 363
376 // Build the configuration records. 364 // Build the configuration records.
377 std::vector<const uint8_t*> nalu_data_ptrs; 365 std::vector<const uint8_t*> nalu_data_ptrs;
378 std::vector<size_t> nalu_data_sizes; 366 std::vector<size_t> nalu_data_sizes;
379 nalu_data_ptrs.reserve(3); 367 nalu_data_ptrs.reserve(3);
380 nalu_data_sizes.reserve(3); 368 nalu_data_sizes.reserve(3);
381 nalu_data_ptrs.push_back(&last_sps_.front()); 369 nalu_data_ptrs.push_back(&last_sps_.front());
382 nalu_data_sizes.push_back(last_sps_.size()); 370 nalu_data_sizes.push_back(last_sps_.size());
383 if (!last_spsext_.empty()) { 371 if (!last_spsext_.empty()) {
384 nalu_data_ptrs.push_back(&last_spsext_.front()); 372 nalu_data_ptrs.push_back(&last_spsext_.front());
385 nalu_data_sizes.push_back(last_spsext_.size()); 373 nalu_data_sizes.push_back(last_spsext_.size());
386 } 374 }
387 nalu_data_ptrs.push_back(&last_pps_.front()); 375 nalu_data_ptrs.push_back(&last_pps_.front());
388 nalu_data_sizes.push_back(last_pps_.size()); 376 nalu_data_sizes.push_back(last_pps_.size());
389 377
390 // Construct a new format description from the parameter sets. 378 // Construct a new format description from the parameter sets.
391 format_.reset(); 379 format_.reset();
392 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets( 380 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
393 kCFAllocatorDefault, 381 kCFAllocatorDefault,
394 nalu_data_ptrs.size(), // parameter_set_count 382 nalu_data_ptrs.size(), // parameter_set_count
395 &nalu_data_ptrs.front(), // &parameter_set_pointers 383 &nalu_data_ptrs.front(), // &parameter_set_pointers
396 &nalu_data_sizes.front(), // &parameter_set_sizes 384 &nalu_data_sizes.front(), // &parameter_set_sizes
397 kNALUHeaderLength, // nal_unit_header_length 385 kNALUHeaderLength, // nal_unit_header_length
398 format_.InitializeInto()); 386 format_.InitializeInto());
399 if (status) { 387 if (status) {
400 NOTIFY_STATUS("CMVideoFormatDescriptionCreateFromH264ParameterSets()", 388 NOTIFY_STATUS("CMVideoFormatDescriptionCreateFromH264ParameterSets()",
401 status, SFT_PLATFORM_ERROR); 389 status, SFT_PLATFORM_ERROR);
402 return false; 390 return false;
403 } 391 }
404 392
405 // Store the new configuration data. 393 // Store the new configuration data.
406 // TODO(sandersd): Despite the documentation, this seems to return the visible 394 // TODO(sandersd): Despite the documentation, this seems to return the visible
407 // size. However, the output always appears to be top-left aligned, so it 395 // size. However, the output always appears to be top-left aligned, so it
408 // makes no difference. Re-verify this and update the variable name. 396 // makes no difference. Re-verify this and update the variable name.
409 CMVideoDimensions coded_dimensions = 397 CMVideoDimensions coded_dimensions =
410 CMVideoFormatDescriptionGetDimensions(format_); 398 CMVideoFormatDescriptionGetDimensions(format_);
411 coded_size_.SetSize(coded_dimensions.width, coded_dimensions.height); 399 coded_size_.SetSize(coded_dimensions.width, coded_dimensions.height);
412 400
413 // Prepare VideoToolbox configuration dictionaries. 401 // Prepare VideoToolbox configuration dictionaries.
414 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config( 402 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config(
415 CFDictionaryCreateMutable( 403 CFDictionaryCreateMutable(kCFAllocatorDefault,
416 kCFAllocatorDefault, 404 1, // capacity
417 1, // capacity 405 &kCFTypeDictionaryKeyCallBacks,
418 &kCFTypeDictionaryKeyCallBacks, 406 &kCFTypeDictionaryValueCallBacks));
419 &kCFTypeDictionaryValueCallBacks));
420 if (!decoder_config.get()) { 407 if (!decoder_config.get()) {
421 DLOG(ERROR) << "Failed to create CFMutableDictionary."; 408 DLOG(ERROR) << "Failed to create CFMutableDictionary.";
422 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 409 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
423 return false; 410 return false;
424 } 411 }
425 412
426 CFDictionarySetValue( 413 CFDictionarySetValue(
427 decoder_config, 414 decoder_config,
428 // kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder 415 // kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder
429 CFSTR("EnableHardwareAcceleratedVideoDecoder"), 416 CFSTR("EnableHardwareAcceleratedVideoDecoder"), kCFBooleanTrue);
430 kCFBooleanTrue);
431 417
432 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config( 418 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config(
433 BuildImageConfig(coded_dimensions)); 419 BuildImageConfig(coded_dimensions));
434 if (!image_config.get()) { 420 if (!image_config.get()) {
435 DLOG(ERROR) << "Failed to create decoder image configuration."; 421 DLOG(ERROR) << "Failed to create decoder image configuration.";
436 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 422 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
437 return false; 423 return false;
438 } 424 }
439 425
440 // Ensure that the old decoder emits all frames before the new decoder can 426 // Ensure that the old decoder emits all frames before the new decoder can
441 // emit any. 427 // emit any.
442 if (!FinishDelayedFrames()) 428 if (!FinishDelayedFrames())
443 return false; 429 return false;
444 430
445 session_.reset(); 431 session_.reset();
446 status = VTDecompressionSessionCreate( 432 status = VTDecompressionSessionCreate(
447 kCFAllocatorDefault, 433 kCFAllocatorDefault,
448 format_, // video_format_description 434 format_, // video_format_description
449 decoder_config, // video_decoder_specification 435 decoder_config, // video_decoder_specification
450 image_config, // destination_image_buffer_attributes 436 image_config, // destination_image_buffer_attributes
451 &callback_, // output_callback 437 &callback_, // output_callback
452 session_.InitializeInto()); 438 session_.InitializeInto());
453 if (status) { 439 if (status) {
454 NOTIFY_STATUS("VTDecompressionSessionCreate()", status, 440 NOTIFY_STATUS("VTDecompressionSessionCreate()", status,
455 SFT_UNSUPPORTED_STREAM_PARAMETERS); 441 SFT_UNSUPPORTED_STREAM_PARAMETERS);
456 return false; 442 return false;
457 } 443 }
458 444
459 // Report whether hardware decode is being used. 445 // Report whether hardware decode is being used.
460 bool using_hardware = false; 446 bool using_hardware = false;
461 base::ScopedCFTypeRef<CFBooleanRef> cf_using_hardware; 447 base::ScopedCFTypeRef<CFBooleanRef> cf_using_hardware;
462 if (VTSessionCopyProperty( 448 if (VTSessionCopyProperty(
463 session_, 449 session_,
464 // kVTDecompressionPropertyKey_UsingHardwareAcceleratedVideoDecoder 450 // kVTDecompressionPropertyKey_UsingHardwareAcceleratedVideoDecoder
465 CFSTR("UsingHardwareAcceleratedVideoDecoder"), 451 CFSTR("UsingHardwareAcceleratedVideoDecoder"), kCFAllocatorDefault,
466 kCFAllocatorDefault,
467 cf_using_hardware.InitializeInto()) == 0) { 452 cf_using_hardware.InitializeInto()) == 0) {
468 using_hardware = CFBooleanGetValue(cf_using_hardware); 453 using_hardware = CFBooleanGetValue(cf_using_hardware);
469 } 454 }
470 UMA_HISTOGRAM_BOOLEAN("Media.VTVDA.HardwareAccelerated", using_hardware); 455 UMA_HISTOGRAM_BOOLEAN("Media.VTVDA.HardwareAccelerated", using_hardware);
471 456
472 return true; 457 return true;
473 } 458 }
474 459
475 void VTVideoDecodeAccelerator::DecodeTask( 460 void VTVideoDecodeAccelerator::DecodeTask(
476 const media::BitstreamBuffer& bitstream, 461 const media::BitstreamBuffer& bitstream,
(...skipping 129 matching lines...)
606 DLOG(ERROR) << "Unable to compute POC"; 591 DLOG(ERROR) << "Unable to compute POC";
607 NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM); 592 NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM);
608 return; 593 return;
609 } 594 }
610 595
611 if (nalu.nal_unit_type == media::H264NALU::kIDRSlice) 596 if (nalu.nal_unit_type == media::H264NALU::kIDRSlice)
612 frame->is_idr = true; 597 frame->is_idr = true;
613 598
614 if (sps->vui_parameters_present_flag && 599 if (sps->vui_parameters_present_flag &&
615 sps->bitstream_restriction_flag) { 600 sps->bitstream_restriction_flag) {
616 frame->reorder_window = std::min(sps->max_num_reorder_frames, 601 frame->reorder_window =
617 kMaxReorderQueueSize - 1); 602 std::min(sps->max_num_reorder_frames, kMaxReorderQueueSize - 1);
618 } 603 }
619 } 604 }
620 has_slice = true; 605 has_slice = true;
621 default: 606 default:
622 nalus.push_back(nalu); 607 nalus.push_back(nalu);
623 data_size += kNALUHeaderLength + nalu.size; 608 data_size += kNALUHeaderLength + nalu.size;
624 break; 609 break;
625 } 610 }
626 } 611 }
627 612
(...skipping 38 matching lines...)
666 } 651 }
667 has_slice = false; 652 has_slice = false;
668 } 653 }
669 654
670 // If there is nothing to decode, drop the bitstream buffer by returning an 655 // If there is nothing to decode, drop the bitstream buffer by returning an
671 // empty frame. 656 // empty frame.
672 if (!has_slice) { 657 if (!has_slice) {
673 // Keep everything in order by flushing first. 658 // Keep everything in order by flushing first.
674 if (!FinishDelayedFrames()) 659 if (!FinishDelayedFrames())
675 return; 660 return;
676 gpu_task_runner_->PostTask(FROM_HERE, base::Bind( 661 gpu_task_runner_->PostTask(
677 &VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame)); 662 FROM_HERE,
663 base::Bind(&VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame));
678 return; 664 return;
679 } 665 }
680 666
681 // If the session is not configured by this point, fail. 667 // If the session is not configured by this point, fail.
682 if (!session_) { 668 if (!session_) {
683 DLOG(ERROR) << "Cannot decode without configuration"; 669 DLOG(ERROR) << "Cannot decode without configuration";
684 NotifyError(INVALID_ARGUMENT, SFT_INVALID_STREAM); 670 NotifyError(INVALID_ARGUMENT, SFT_INVALID_STREAM);
685 return; 671 return;
686 } 672 }
687 673
(...skipping 27 matching lines...)
715 NOTIFY_STATUS("CMBlockBufferAssureBlockMemory()", status, 701 NOTIFY_STATUS("CMBlockBufferAssureBlockMemory()", status,
716 SFT_PLATFORM_ERROR); 702 SFT_PLATFORM_ERROR);
717 return; 703 return;
718 } 704 }
719 705
720 // Copy NALU data into the CMBlockBuffer, inserting length headers. 706 // Copy NALU data into the CMBlockBuffer, inserting length headers.
721 size_t offset = 0; 707 size_t offset = 0;
722 for (size_t i = 0; i < nalus.size(); i++) { 708 for (size_t i = 0; i < nalus.size(); i++) {
723 media::H264NALU& nalu = nalus[i]; 709 media::H264NALU& nalu = nalus[i];
724 uint32_t header = base::HostToNet32(static_cast<uint32_t>(nalu.size)); 710 uint32_t header = base::HostToNet32(static_cast<uint32_t>(nalu.size));
725 status = CMBlockBufferReplaceDataBytes( 711 status =
726 &header, data, offset, kNALUHeaderLength); 712 CMBlockBufferReplaceDataBytes(&header, data, offset, kNALUHeaderLength);
727 if (status) { 713 if (status) {
728 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status, 714 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status,
729 SFT_PLATFORM_ERROR); 715 SFT_PLATFORM_ERROR);
730 return; 716 return;
731 } 717 }
732 offset += kNALUHeaderLength; 718 offset += kNALUHeaderLength;
733 status = CMBlockBufferReplaceDataBytes(nalu.data, data, offset, nalu.size); 719 status = CMBlockBufferReplaceDataBytes(nalu.data, data, offset, nalu.size);
734 if (status) { 720 if (status) {
735 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status, 721 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status,
736 SFT_PLATFORM_ERROR); 722 SFT_PLATFORM_ERROR);
737 return; 723 return;
738 } 724 }
739 offset += nalu.size; 725 offset += nalu.size;
740 } 726 }
741 727
742 // Package the data in a CMSampleBuffer. 728 // Package the data in a CMSampleBuffer.
743 base::ScopedCFTypeRef<CMSampleBufferRef> sample; 729 base::ScopedCFTypeRef<CMSampleBufferRef> sample;
744 status = CMSampleBufferCreate( 730 status = CMSampleBufferCreate(kCFAllocatorDefault,
745 kCFAllocatorDefault, 731 data, // data_buffer
746 data, // data_buffer 732 true, // data_ready
747 true, // data_ready 733 nullptr, // make_data_ready_callback
748 nullptr, // make_data_ready_callback 734 nullptr, // make_data_ready_refcon
749 nullptr, // make_data_ready_refcon 735 format_, // format_description
750 format_, // format_description 736 1, // num_samples
751 1, // num_samples 737 0, // num_sample_timing_entries
752 0, // num_sample_timing_entries 738 nullptr, // &sample_timing_array
753 nullptr, // &sample_timing_array 739 1, // num_sample_size_entries
754 1, // num_sample_size_entries 740 &data_size, // &sample_size_array
755 &data_size, // &sample_size_array 741 sample.InitializeInto());
756 sample.InitializeInto());
757 if (status) { 742 if (status) {
758 NOTIFY_STATUS("CMSampleBufferCreate()", status, SFT_PLATFORM_ERROR); 743 NOTIFY_STATUS("CMSampleBufferCreate()", status, SFT_PLATFORM_ERROR);
759 return; 744 return;
760 } 745 }
761 746
762 // Send the frame for decoding. 747 // Send the frame for decoding.
763 // Asynchronous Decompression allows for parallel submission of frames 748 // Asynchronous Decompression allows for parallel submission of frames
764 // (without it, DecodeFrame() does not return until the frame has been 749 // (without it, DecodeFrame() does not return until the frame has been
765 // decoded). We don't enable Temporal Processing so that frames are always 750 // decoded). We don't enable Temporal Processing so that frames are always
766 // returned in decode order; this makes it easier to avoid deadlock. 751 // returned in decode order; this makes it easier to avoid deadlock.
767 VTDecodeFrameFlags decode_flags = 752 VTDecodeFrameFlags decode_flags =
768 kVTDecodeFrame_EnableAsynchronousDecompression; 753 kVTDecodeFrame_EnableAsynchronousDecompression;
769 status = VTDecompressionSessionDecodeFrame( 754 status = VTDecompressionSessionDecodeFrame(
770 session_, 755 session_,
771 sample, // sample_buffer 756 sample, // sample_buffer
772 decode_flags, // decode_flags 757 decode_flags, // decode_flags
773 reinterpret_cast<void*>(frame), // source_frame_refcon 758 reinterpret_cast<void*>(frame), // source_frame_refcon
774 nullptr); // &info_flags_out 759 nullptr); // &info_flags_out
775 if (status) { 760 if (status) {
776 NOTIFY_STATUS("VTDecompressionSessionDecodeFrame()", status, 761 NOTIFY_STATUS("VTDecompressionSessionDecodeFrame()", status,
777 SFT_DECODE_ERROR); 762 SFT_DECODE_ERROR);
778 return; 763 return;
779 } 764 }
780 } 765 }
781 766
782 // This method may be called on any VideoToolbox thread. 767 // This method may be called on any VideoToolbox thread.
783 void VTVideoDecodeAccelerator::Output( 768 void VTVideoDecodeAccelerator::Output(void* source_frame_refcon,
784 void* source_frame_refcon, 769 OSStatus status,
785 OSStatus status, 770 CVImageBufferRef image_buffer) {
786 CVImageBufferRef image_buffer) {
787 if (status) { 771 if (status) {
788 NOTIFY_STATUS("Decoding", status, SFT_DECODE_ERROR); 772 NOTIFY_STATUS("Decoding", status, SFT_DECODE_ERROR);
789 return; 773 return;
790 } 774 }
791 775
792 // The type of |image_buffer| is CVImageBuffer, but we only handle 776 // The type of |image_buffer| is CVImageBuffer, but we only handle
793 // CVPixelBuffers. This should be guaranteed as we set 777 // CVPixelBuffers. This should be guaranteed as we set
794 // kCVPixelBufferOpenGLCompatibilityKey in |image_config|. 778 // kCVPixelBufferOpenGLCompatibilityKey in |image_config|.
795 // 779 //
796 // Sometimes, for unknown reasons (http://crbug.com/453050), |image_buffer| is 780 // Sometimes, for unknown reasons (http://crbug.com/453050), |image_buffer| is
797 // NULL, which causes CFGetTypeID() to crash. While the rest of the code would 781 // NULL, which causes CFGetTypeID() to crash. While the rest of the code would
798 // smoothly handle NULL as a dropped frame, we choose to fail permanently here 782 // smoothly handle NULL as a dropped frame, we choose to fail permanently here
799 // until the issue is better understood. 783 // until the issue is better understood.
800 if (!image_buffer || CFGetTypeID(image_buffer) != CVPixelBufferGetTypeID()) { 784 if (!image_buffer || CFGetTypeID(image_buffer) != CVPixelBufferGetTypeID()) {
801 DLOG(ERROR) << "Decoded frame is not a CVPixelBuffer"; 785 DLOG(ERROR) << "Decoded frame is not a CVPixelBuffer";
802 NotifyError(PLATFORM_FAILURE, SFT_DECODE_ERROR); 786 NotifyError(PLATFORM_FAILURE, SFT_DECODE_ERROR);
803 return; 787 return;
804 } 788 }
805 789
806 Frame* frame = reinterpret_cast<Frame*>(source_frame_refcon); 790 Frame* frame = reinterpret_cast<Frame*>(source_frame_refcon);
807 frame->image.reset(image_buffer, base::scoped_policy::RETAIN); 791 frame->image.reset(image_buffer, base::scoped_policy::RETAIN);
808 gpu_task_runner_->PostTask(FROM_HERE, base::Bind( 792 gpu_task_runner_->PostTask(
809 &VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame)); 793 FROM_HERE,
794 base::Bind(&VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame));
810 } 795 }
811 796
812 void VTVideoDecodeAccelerator::DecodeDone(Frame* frame) { 797 void VTVideoDecodeAccelerator::DecodeDone(Frame* frame) {
813 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 798 DCHECK(gpu_thread_checker_.CalledOnValidThread());
814 DCHECK_EQ(1u, pending_frames_.count(frame->bitstream_id)); 799 DCHECK_EQ(1u, pending_frames_.count(frame->bitstream_id));
815 Task task(TASK_FRAME); 800 Task task(TASK_FRAME);
816 task.frame = pending_frames_[frame->bitstream_id]; 801 task.frame = pending_frames_[frame->bitstream_id];
817 pending_frames_.erase(frame->bitstream_id); 802 pending_frames_.erase(frame->bitstream_id);
818 task_queue_.push(task); 803 task_queue_.push(task);
819 ProcessWorkQueues(); 804 ProcessWorkQueues();
820 } 805 }
821 806
822 void VTVideoDecodeAccelerator::FlushTask(TaskType type) { 807 void VTVideoDecodeAccelerator::FlushTask(TaskType type) {
823 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 808 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
824 FinishDelayedFrames(); 809 FinishDelayedFrames();
825 810
826 // Always queue a task, even if FinishDelayedFrames() fails, so that 811 // Always queue a task, even if FinishDelayedFrames() fails, so that
827 // destruction always completes. 812 // destruction always completes.
828 gpu_task_runner_->PostTask(FROM_HERE, base::Bind( 813 gpu_task_runner_->PostTask(
829 &VTVideoDecodeAccelerator::FlushDone, weak_this_, type)); 814 FROM_HERE,
815 base::Bind(&VTVideoDecodeAccelerator::FlushDone, weak_this_, type));
830 } 816 }
831 817
832 void VTVideoDecodeAccelerator::FlushDone(TaskType type) { 818 void VTVideoDecodeAccelerator::FlushDone(TaskType type) {
833 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 819 DCHECK(gpu_thread_checker_.CalledOnValidThread());
834 task_queue_.push(Task(type)); 820 task_queue_.push(Task(type));
835 ProcessWorkQueues(); 821 ProcessWorkQueues();
836 } 822 }
837 823
838 void VTVideoDecodeAccelerator::Decode(const media::BitstreamBuffer& bitstream) { 824 void VTVideoDecodeAccelerator::Decode(const media::BitstreamBuffer& bitstream) {
839 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 825 DCHECK(gpu_thread_checker_.CalledOnValidThread());
(...skipping 25 matching lines...)
865 DCHECK_LE(1u, picture.texture_ids().size()); 851 DCHECK_LE(1u, picture.texture_ids().size());
866 picture_info_map_.insert(std::make_pair( 852 picture_info_map_.insert(std::make_pair(
867 picture.id(), 853 picture.id(),
868 base::WrapUnique(new PictureInfo(picture.internal_texture_ids()[0], 854 base::WrapUnique(new PictureInfo(picture.internal_texture_ids()[0],
869 picture.texture_ids()[0])))); 855 picture.texture_ids()[0]))));
870 } 856 }
871 857
872 // Pictures are not marked as uncleared until after this method returns, and 858 // Pictures are not marked as uncleared until after this method returns, and
873 // they will be broken if they are used before that happens. So, schedule 859 // they will be broken if they are used before that happens. So, schedule
874 // future work after that happens. 860 // future work after that happens.
875 gpu_task_runner_->PostTask(FROM_HERE, base::Bind( 861 gpu_task_runner_->PostTask(
876 &VTVideoDecodeAccelerator::ProcessWorkQueues, weak_this_)); 862 FROM_HERE,
863 base::Bind(&VTVideoDecodeAccelerator::ProcessWorkQueues, weak_this_));
877 } 864 }
878 865
879 void VTVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_id) { 866 void VTVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_id) {
880 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 867 DCHECK(gpu_thread_checker_.CalledOnValidThread());
881 DCHECK(picture_info_map_.count(picture_id)); 868 DCHECK(picture_info_map_.count(picture_id));
882 PictureInfo* picture_info = picture_info_map_.find(picture_id)->second.get(); 869 PictureInfo* picture_info = picture_info_map_.find(picture_id)->second.get();
883 picture_info->cv_image.reset(); 870 picture_info->cv_image.reset();
884 picture_info->gl_image->Destroy(false); 871 picture_info->gl_image->Destroy(false);
885 picture_info->gl_image = nullptr; 872 picture_info->gl_image = nullptr;
886 873
(...skipping 84 matching lines...)
971 bool VTVideoDecodeAccelerator::ProcessReorderQueue() { 958 bool VTVideoDecodeAccelerator::ProcessReorderQueue() {
972 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 959 DCHECK(gpu_thread_checker_.CalledOnValidThread());
973 DCHECK_EQ(state_, STATE_DECODING); 960 DCHECK_EQ(state_, STATE_DECODING);
974 961
975 if (reorder_queue_.empty()) 962 if (reorder_queue_.empty())
976 return false; 963 return false;
977 964
978 // If the next task is a flush (because there is a pending flush or because 965 // If the next task is a flush (because there is a pending flush or because
979 // the next frame is an IDR), then we don't need a full reorder buffer to send 966 // the next frame is an IDR), then we don't need a full reorder buffer to send
980 // the next frame. 967 // the next frame.
981 bool flushing = !task_queue_.empty() && 968 bool flushing =
982 (task_queue_.front().type != TASK_FRAME || 969 !task_queue_.empty() && (task_queue_.front().type != TASK_FRAME ||
983 task_queue_.front().frame->is_idr); 970 task_queue_.front().frame->is_idr);
984 971
985 size_t reorder_window = std::max(0, reorder_queue_.top()->reorder_window); 972 size_t reorder_window = std::max(0, reorder_queue_.top()->reorder_window);
986 if (flushing || reorder_queue_.size() > reorder_window) { 973 if (flushing || reorder_queue_.size() > reorder_window) {
987 if (ProcessFrame(*reorder_queue_.top())) { 974 if (ProcessFrame(*reorder_queue_.top())) {
988 reorder_queue_.pop(); 975 reorder_queue_.pop();
989 return true; 976 return true;
990 } 977 }
991 } 978 }
992 979
993 return false; 980 return false;
(...skipping 52 matching lines...)
1046 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 1033 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
1047 return false; 1034 return false;
1048 } 1035 }
1049 1036
1050 scoped_refptr<gl::GLImageIOSurface> gl_image( 1037 scoped_refptr<gl::GLImageIOSurface> gl_image(
1051 new gl::GLImageIOSurface(frame.coded_size, GL_BGRA_EXT)); 1038 new gl::GLImageIOSurface(frame.coded_size, GL_BGRA_EXT));
1052 if (!gl_image->InitializeWithCVPixelBuffer( 1039 if (!gl_image->InitializeWithCVPixelBuffer(
1053 frame.image.get(), gfx::GenericSharedMemoryId(), 1040 frame.image.get(), gfx::GenericSharedMemoryId(),
1054 gfx::BufferFormat::YUV_420_BIPLANAR)) { 1041 gfx::BufferFormat::YUV_420_BIPLANAR)) {
1055 NOTIFY_STATUS("Failed to initialize GLImageIOSurface", PLATFORM_FAILURE, 1042 NOTIFY_STATUS("Failed to initialize GLImageIOSurface", PLATFORM_FAILURE,
1056 SFT_PLATFORM_ERROR); 1043 SFT_PLATFORM_ERROR);
1057 } 1044 }
1058 1045
1059 if (!bind_image_cb_.Run(picture_info->client_texture_id, 1046 if (!bind_image_cb_.Run(picture_info->client_texture_id,
1060 GL_TEXTURE_RECTANGLE_ARB, gl_image, false)) { 1047 GL_TEXTURE_RECTANGLE_ARB, gl_image, false)) {
1061 DLOG(ERROR) << "Failed to bind image"; 1048 DLOG(ERROR) << "Failed to bind image";
1062 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 1049 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
1063 return false; 1050 return false;
1064 } 1051 }
1065 1052
1066 // Assign the new image(s) to the picture info. 1053 // Assign the new image(s) to the picture info.
1067 picture_info->gl_image = gl_image; 1054 picture_info->gl_image = gl_image;
1068 picture_info->cv_image = frame.image; 1055 picture_info->cv_image = frame.image;
1069 available_picture_ids_.pop_back(); 1056 available_picture_ids_.pop_back();
1070 1057
1071 // TODO(sandersd): Currently, the size got from 1058 // TODO(sandersd): Currently, the size got from
1072 // CMVideoFormatDescriptionGetDimensions is visible size. We pass it to 1059 // CMVideoFormatDescriptionGetDimensions is visible size. We pass it to
1073 // GpuVideoDecoder so that GpuVideoDecoder can use correct visible size in 1060 // GpuVideoDecoder so that GpuVideoDecoder can use correct visible size in
1074 // resolution changed. We should find the correct API to get the real 1061 // resolution changed. We should find the correct API to get the real
1075 // coded size and fix it. 1062 // coded size and fix it.
1076 client_->PictureReady(media::Picture(picture_id, frame.bitstream_id, 1063 client_->PictureReady(media::Picture(picture_id, frame.bitstream_id,
1077 gfx::Rect(frame.coded_size), 1064 gfx::Rect(frame.coded_size), true));
1078 true));
1079 return true; 1065 return true;
1080 } 1066 }
1081 1067
1082 void VTVideoDecodeAccelerator::NotifyError( 1068 void VTVideoDecodeAccelerator::NotifyError(
1083 Error vda_error_type, 1069 Error vda_error_type,
1084 VTVDASessionFailureType session_failure_type) { 1070 VTVDASessionFailureType session_failure_type) {
1085 DCHECK_LT(session_failure_type, SFT_MAX + 1); 1071 DCHECK_LT(session_failure_type, SFT_MAX + 1);
1086 if (!gpu_thread_checker_.CalledOnValidThread()) { 1072 if (!gpu_thread_checker_.CalledOnValidThread()) {
1087 gpu_task_runner_->PostTask(FROM_HERE, base::Bind( 1073 gpu_task_runner_->PostTask(
1088 &VTVideoDecodeAccelerator::NotifyError, weak_this_, vda_error_type, 1074 FROM_HERE,
1089 session_failure_type)); 1075 base::Bind(&VTVideoDecodeAccelerator::NotifyError, weak_this_,
1076 vda_error_type, session_failure_type));
1090 } else if (state_ == STATE_DECODING) { 1077 } else if (state_ == STATE_DECODING) {
1091 state_ = STATE_ERROR; 1078 state_ = STATE_ERROR;
1092 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason", 1079 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason",
1093 session_failure_type, 1080 session_failure_type, SFT_MAX + 1);
1094 SFT_MAX + 1);
1095 client_->NotifyError(vda_error_type); 1081 client_->NotifyError(vda_error_type);
1096 } 1082 }
1097 } 1083 }
1098 1084
1099 void VTVideoDecodeAccelerator::QueueFlush(TaskType type) { 1085 void VTVideoDecodeAccelerator::QueueFlush(TaskType type) {
1100 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 1086 DCHECK(gpu_thread_checker_.CalledOnValidThread());
1101 pending_flush_tasks_.push(type); 1087 pending_flush_tasks_.push(type);
1102 decoder_thread_.task_runner()->PostTask( 1088 decoder_thread_.task_runner()->PostTask(
1103 FROM_HERE, base::Bind(&VTVideoDecodeAccelerator::FlushTask, 1089 FROM_HERE, base::Bind(&VTVideoDecodeAccelerator::FlushTask,
1104 base::Unretained(this), type)); 1090 base::Unretained(this), type));
(...skipping 46 matching lines...)
1151 for (const auto& supported_profile : kSupportedProfiles) { 1137 for (const auto& supported_profile : kSupportedProfiles) {
1152 SupportedProfile profile; 1138 SupportedProfile profile;
1153 profile.profile = supported_profile; 1139 profile.profile = supported_profile;
1154 profile.min_resolution.SetSize(16, 16); 1140 profile.min_resolution.SetSize(16, 16);
1155 profile.max_resolution.SetSize(4096, 2160); 1141 profile.max_resolution.SetSize(4096, 2160);
1156 profiles.push_back(profile); 1142 profiles.push_back(profile);
1157 } 1143 }
1158 return profiles; 1144 return profiles;
1159 } 1145 }
1160 1146
1161 } // namespace content 1147 } // namespace media