Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(397)

Side by Side Diff: media/gpu/vt_video_decode_accelerator_mac.cc

Issue 1939683002: Test X11 header pollution (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/common/gpu/media/vt_video_decode_accelerator_mac.h" 5 #include "media/gpu/vt_video_decode_accelerator_mac.h"
6 6
7 #include <CoreVideo/CoreVideo.h> 7 #include <CoreVideo/CoreVideo.h>
8 #include <OpenGL/CGLIOSurface.h> 8 #include <OpenGL/CGLIOSurface.h>
9 #include <OpenGL/gl.h> 9 #include <OpenGL/gl.h>
10 #include <stddef.h> 10 #include <stddef.h>
11 11
12 #include <algorithm> 12 #include <algorithm>
13 #include <memory> 13 #include <memory>
14 14
15 #include "base/bind.h" 15 #include "base/bind.h"
16 #include "base/logging.h" 16 #include "base/logging.h"
17 #include "base/mac/mac_logging.h" 17 #include "base/mac/mac_logging.h"
18 #include "base/macros.h" 18 #include "base/macros.h"
19 #include "base/memory/ptr_util.h" 19 #include "base/memory/ptr_util.h"
20 #include "base/metrics/histogram_macros.h" 20 #include "base/metrics/histogram_macros.h"
21 #include "base/sys_byteorder.h" 21 #include "base/sys_byteorder.h"
22 #include "base/sys_info.h" 22 #include "base/sys_info.h"
23 #include "base/thread_task_runner_handle.h" 23 #include "base/thread_task_runner_handle.h"
24 #include "base/version.h" 24 #include "base/version.h"
25 #include "media/base/limits.h" 25 #include "media/base/limits.h"
26 #include "ui/gl/gl_context.h" 26 #include "ui/gl/gl_context.h"
27 #include "ui/gl/gl_image_io_surface.h" 27 #include "ui/gl/gl_image_io_surface.h"
28 #include "ui/gl/gl_implementation.h" 28 #include "ui/gl/gl_implementation.h"
29 #include "ui/gl/scoped_binders.h" 29 #include "ui/gl/scoped_binders.h"
30 30
31 using content_common_gpu_media::kModuleVt; 31 using media_gpu::kModuleVt;
32 using content_common_gpu_media::InitializeStubs; 32 using media_gpu::InitializeStubs;
33 using content_common_gpu_media::IsVtInitialized; 33 using media_gpu::IsVtInitialized;
34 using content_common_gpu_media::StubPathMap; 34 using media_gpu::StubPathMap;
35 35
36 #define NOTIFY_STATUS(name, status, session_failure) \ 36 #define NOTIFY_STATUS(name, status, session_failure) \
37 do { \ 37 do { \
38 OSSTATUS_DLOG(ERROR, status) << name; \ 38 OSSTATUS_DLOG(ERROR, status) << name; \
39 NotifyError(PLATFORM_FAILURE, session_failure); \ 39 NotifyError(PLATFORM_FAILURE, session_failure); \
40 } while (0) 40 } while (0)
41 41
42 namespace content { 42 namespace media {
43 43
44 // Only H.264 with 4:2:0 chroma sampling is supported. 44 // Only H.264 with 4:2:0 chroma sampling is supported.
45 static const media::VideoCodecProfile kSupportedProfiles[] = { 45 static const media::VideoCodecProfile kSupportedProfiles[] = {
46 media::H264PROFILE_BASELINE, 46 media::H264PROFILE_BASELINE, media::H264PROFILE_MAIN,
47 media::H264PROFILE_MAIN, 47 media::H264PROFILE_EXTENDED, media::H264PROFILE_HIGH,
48 media::H264PROFILE_EXTENDED, 48 // TODO(hubbe): Try to re-enable this again somehow. Currently it seems
49 media::H264PROFILE_HIGH, 49 // that some codecs fail to check the profile during initialization and
50 // TODO(hubbe): Try to re-enable this again somehow. Currently it seems 50 // then fail on the first frame decode, which currently results in a
51 // that some codecs fail to check the profile during initialization and 51 // pipeline failure.
52 // then fail on the first frame decode, which currently results in a 52 // media::H264PROFILE_HIGH10PROFILE,
53 // pipeline failure. 53 media::H264PROFILE_SCALABLEBASELINE, media::H264PROFILE_SCALABLEHIGH,
54 // media::H264PROFILE_HIGH10PROFILE, 54 media::H264PROFILE_STEREOHIGH, media::H264PROFILE_MULTIVIEWHIGH,
55 media::H264PROFILE_SCALABLEBASELINE,
56 media::H264PROFILE_SCALABLEHIGH,
57 media::H264PROFILE_STEREOHIGH,
58 media::H264PROFILE_MULTIVIEWHIGH,
59 }; 55 };
60 56
61 // Size to use for NALU length headers in AVC format (can be 1, 2, or 4). 57 // Size to use for NALU length headers in AVC format (can be 1, 2, or 4).
62 static const int kNALUHeaderLength = 4; 58 static const int kNALUHeaderLength = 4;
63 59
64 // We request 5 picture buffers from the client, each of which has a texture ID 60 // We request 5 picture buffers from the client, each of which has a texture ID
65 // that we can bind decoded frames to. We need enough to satisfy preroll, and 61 // that we can bind decoded frames to. We need enough to satisfy preroll, and
66 // enough to avoid unnecessary stalling, but no more than that. The resource 62 // enough to avoid unnecessary stalling, but no more than that. The resource
67 // requirements are low, as we don't need the textures to be backed by storage. 63 // requirements are low, as we don't need the textures to be backed by storage.
68 static const int kNumPictureBuffers = media::limits::kMaxVideoFrames + 1; 64 static const int kNumPictureBuffers = media::limits::kMaxVideoFrames + 1;
69 65
70 // Maximum number of frames to queue for reordering before we stop asking for 66 // Maximum number of frames to queue for reordering before we stop asking for
71 // more. (NotifyEndOfBitstreamBuffer() is called when frames are moved into the 67 // more. (NotifyEndOfBitstreamBuffer() is called when frames are moved into the
72 // reorder queue.) 68 // reorder queue.)
73 static const int kMaxReorderQueueSize = 16; 69 static const int kMaxReorderQueueSize = 16;
74 70
75 // Build an |image_config| dictionary for VideoToolbox initialization. 71 // Build an |image_config| dictionary for VideoToolbox initialization.
76 static base::ScopedCFTypeRef<CFMutableDictionaryRef> 72 static base::ScopedCFTypeRef<CFMutableDictionaryRef> BuildImageConfig(
77 BuildImageConfig(CMVideoDimensions coded_dimensions) { 73 CMVideoDimensions coded_dimensions) {
78 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config; 74 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config;
79 75
80 // Note that 4:2:0 textures cannot be used directly as RGBA in OpenGL, but are 76 // Note that 4:2:0 textures cannot be used directly as RGBA in OpenGL, but are
81 // lower power than 4:2:2 when composited directly by CoreAnimation. 77 // lower power than 4:2:2 when composited directly by CoreAnimation.
82 int32_t pixel_format = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; 78 int32_t pixel_format = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
83 #define CFINT(i) CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &i) 79 #define CFINT(i) CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &i)
84 base::ScopedCFTypeRef<CFNumberRef> cf_pixel_format(CFINT(pixel_format)); 80 base::ScopedCFTypeRef<CFNumberRef> cf_pixel_format(CFINT(pixel_format));
85 base::ScopedCFTypeRef<CFNumberRef> cf_width(CFINT(coded_dimensions.width)); 81 base::ScopedCFTypeRef<CFNumberRef> cf_width(CFINT(coded_dimensions.width));
86 base::ScopedCFTypeRef<CFNumberRef> cf_height(CFINT(coded_dimensions.height)); 82 base::ScopedCFTypeRef<CFNumberRef> cf_height(CFINT(coded_dimensions.height));
87 #undef CFINT 83 #undef CFINT
88 if (!cf_pixel_format.get() || !cf_width.get() || !cf_height.get()) 84 if (!cf_pixel_format.get() || !cf_width.get() || !cf_height.get())
89 return image_config; 85 return image_config;
90 86
91 image_config.reset( 87 image_config.reset(CFDictionaryCreateMutable(
92 CFDictionaryCreateMutable( 88 kCFAllocatorDefault,
93 kCFAllocatorDefault, 89 3, // capacity
94 3, // capacity 90 &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
95 &kCFTypeDictionaryKeyCallBacks,
96 &kCFTypeDictionaryValueCallBacks));
97 if (!image_config.get()) 91 if (!image_config.get())
98 return image_config; 92 return image_config;
99 93
100 CFDictionarySetValue(image_config, kCVPixelBufferPixelFormatTypeKey, 94 CFDictionarySetValue(image_config, kCVPixelBufferPixelFormatTypeKey,
101 cf_pixel_format); 95 cf_pixel_format);
102 CFDictionarySetValue(image_config, kCVPixelBufferWidthKey, cf_width); 96 CFDictionarySetValue(image_config, kCVPixelBufferWidthKey, cf_width);
103 CFDictionarySetValue(image_config, kCVPixelBufferHeightKey, cf_height); 97 CFDictionarySetValue(image_config, kCVPixelBufferHeightKey, cf_height);
104 98
105 return image_config; 99 return image_config;
106 } 100 }
107 101
108 // Create a VTDecompressionSession using the provided |pps| and |sps|. If 102 // Create a VTDecompressionSession using the provided |pps| and |sps|. If
109 // |require_hardware| is true, the session must use real hardware decoding 103 // (as opposed to software decoding inside of VideoToolbox) to be considered
110 // (as opposed to software decoding inside of VideoToolbox) to be considered 104 // (as opposed to software decoding inside of VideoToolbox) to be considered
111 // successful. 105 // successful.
112 // 106 //
113 // TODO(sandersd): Merge with ConfigureDecoder(), as the code is very similar. 107 // TODO(sandersd): Merge with ConfigureDecoder(), as the code is very similar.
114 static bool CreateVideoToolboxSession(const uint8_t* sps, size_t sps_size, 108 static bool CreateVideoToolboxSession(const uint8_t* sps,
115 const uint8_t* pps, size_t pps_size, 109 size_t sps_size,
110 const uint8_t* pps,
111 size_t pps_size,
116 bool require_hardware) { 112 bool require_hardware) {
117 const uint8_t* data_ptrs[] = {sps, pps}; 113 const uint8_t* data_ptrs[] = {sps, pps};
118 const size_t data_sizes[] = {sps_size, pps_size}; 114 const size_t data_sizes[] = {sps_size, pps_size};
119 115
120 base::ScopedCFTypeRef<CMFormatDescriptionRef> format; 116 base::ScopedCFTypeRef<CMFormatDescriptionRef> format;
121 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets( 117 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
122 kCFAllocatorDefault, 118 kCFAllocatorDefault,
123 2, // parameter_set_count 119 2, // parameter_set_count
124 data_ptrs, // &parameter_set_pointers 120 data_ptrs, // &parameter_set_pointers
125 data_sizes, // &parameter_set_sizes 121 data_sizes, // &parameter_set_sizes
126 kNALUHeaderLength, // nal_unit_header_length 122 kNALUHeaderLength, // nal_unit_header_length
127 format.InitializeInto()); 123 format.InitializeInto());
128 if (status) { 124 if (status) {
129 OSSTATUS_DLOG(WARNING, status) 125 OSSTATUS_DLOG(WARNING, status)
130 << "Failed to create CMVideoFormatDescription"; 126 << "Failed to create CMVideoFormatDescription";
131 return false; 127 return false;
132 } 128 }
133 129
134 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config( 130 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config(
135 CFDictionaryCreateMutable( 131 CFDictionaryCreateMutable(kCFAllocatorDefault,
136 kCFAllocatorDefault, 132 1, // capacity
137 1, // capacity 133 &kCFTypeDictionaryKeyCallBacks,
138 &kCFTypeDictionaryKeyCallBacks, 134 &kCFTypeDictionaryValueCallBacks));
139 &kCFTypeDictionaryValueCallBacks));
140 if (!decoder_config.get()) 135 if (!decoder_config.get())
141 return false; 136 return false;
142 137
143 if (require_hardware) { 138 if (require_hardware) {
144 CFDictionarySetValue( 139 CFDictionarySetValue(
145 decoder_config, 140 decoder_config,
146 // kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder 141 // kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder
147 CFSTR("RequireHardwareAcceleratedVideoDecoder"), 142 CFSTR("RequireHardwareAcceleratedVideoDecoder"), kCFBooleanTrue);
148 kCFBooleanTrue);
149 } 143 }
150 144
151 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config( 145 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config(
152 BuildImageConfig(CMVideoFormatDescriptionGetDimensions(format))); 146 BuildImageConfig(CMVideoFormatDescriptionGetDimensions(format)));
153 if (!image_config.get()) 147 if (!image_config.get())
154 return false; 148 return false;
155 149
156 VTDecompressionOutputCallbackRecord callback = {0}; 150 VTDecompressionOutputCallbackRecord callback = {0};
157 151
158 base::ScopedCFTypeRef<VTDecompressionSessionRef> session; 152 base::ScopedCFTypeRef<VTDecompressionSessionRef> session;
159 status = VTDecompressionSessionCreate( 153 status = VTDecompressionSessionCreate(
160 kCFAllocatorDefault, 154 kCFAllocatorDefault,
161 format, // video_format_description 155 format, // video_format_description
162 decoder_config, // video_decoder_specification 156 decoder_config, // video_decoder_specification
163 image_config, // destination_image_buffer_attributes 157 image_config, // destination_image_buffer_attributes
164 &callback, // output_callback 158 &callback, // output_callback
165 session.InitializeInto()); 159 session.InitializeInto());
166 if (status) { 160 if (status) {
167 OSSTATUS_DLOG(WARNING, status) 161 OSSTATUS_DLOG(WARNING, status)
168 << "Failed to create VTDecompressionSession"; 162 << "Failed to create VTDecompressionSession";
169 return false; 163 return false;
170 } 164 }
171 165
172 return true; 166 return true;
173 } 167 }
174 168
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
228 222
229 if (!attempted) { 223 if (!attempted) {
230 attempted = true; 224 attempted = true;
231 succeeded = InitializeVideoToolboxInternal(); 225 succeeded = InitializeVideoToolboxInternal();
232 } 226 }
233 227
234 return succeeded; 228 return succeeded;
235 } 229 }
236 230
237 // Route decoded frame callbacks back into the VTVideoDecodeAccelerator. 231 // Route decoded frame callbacks back into the VTVideoDecodeAccelerator.
238 static void OutputThunk( 232 static void OutputThunk(void* decompression_output_refcon,
239 void* decompression_output_refcon, 233 void* source_frame_refcon,
240 void* source_frame_refcon, 234 OSStatus status,
241 OSStatus status, 235 VTDecodeInfoFlags info_flags,
242 VTDecodeInfoFlags info_flags, 236 CVImageBufferRef image_buffer,
243 CVImageBufferRef image_buffer, 237 CMTime presentation_time_stamp,
244 CMTime presentation_time_stamp, 238 CMTime presentation_duration) {
245 CMTime presentation_duration) {
246 VTVideoDecodeAccelerator* vda = 239 VTVideoDecodeAccelerator* vda =
247 reinterpret_cast<VTVideoDecodeAccelerator*>(decompression_output_refcon); 240 reinterpret_cast<VTVideoDecodeAccelerator*>(decompression_output_refcon);
248 vda->Output(source_frame_refcon, status, image_buffer); 241 vda->Output(source_frame_refcon, status, image_buffer);
249 } 242 }
250 243
251 VTVideoDecodeAccelerator::Task::Task(TaskType type) : type(type) { 244 VTVideoDecodeAccelerator::Task::Task(TaskType type) : type(type) {}
252 }
253 245
254 VTVideoDecodeAccelerator::Task::Task(const Task& other) = default; 246 VTVideoDecodeAccelerator::Task::Task(const Task& other) = default;
255 247
256 VTVideoDecodeAccelerator::Task::~Task() { 248 VTVideoDecodeAccelerator::Task::~Task() {}
257 }
258 249
259 VTVideoDecodeAccelerator::Frame::Frame(int32_t bitstream_id) 250 VTVideoDecodeAccelerator::Frame::Frame(int32_t bitstream_id)
260 : bitstream_id(bitstream_id), 251 : bitstream_id(bitstream_id),
261 pic_order_cnt(0), 252 pic_order_cnt(0),
262 is_idr(false), 253 is_idr(false),
263 reorder_window(0) { 254 reorder_window(0) {}
264 }
265 255
266 VTVideoDecodeAccelerator::Frame::~Frame() { 256 VTVideoDecodeAccelerator::Frame::~Frame() {}
267 }
268 257
269 VTVideoDecodeAccelerator::PictureInfo::PictureInfo(uint32_t client_texture_id, 258 VTVideoDecodeAccelerator::PictureInfo::PictureInfo(uint32_t client_texture_id,
270 uint32_t service_texture_id) 259 uint32_t service_texture_id)
271 : client_texture_id(client_texture_id), 260 : client_texture_id(client_texture_id),
272 service_texture_id(service_texture_id) {} 261 service_texture_id(service_texture_id) {}
273 262
274 VTVideoDecodeAccelerator::PictureInfo::~PictureInfo() { 263 VTVideoDecodeAccelerator::PictureInfo::~PictureInfo() {
275 if (gl_image) 264 if (gl_image)
276 gl_image->Destroy(false); 265 gl_image->Destroy(false);
277 } 266 }
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
347 } 336 }
348 if (!profile_supported) 337 if (!profile_supported)
349 return false; 338 return false;
350 339
351 // Spawn a thread to handle parsing and calling VideoToolbox. 340 // Spawn a thread to handle parsing and calling VideoToolbox.
352 if (!decoder_thread_.Start()) 341 if (!decoder_thread_.Start())
353 return false; 342 return false;
354 343
355 // Count the session as successfully initialized. 344 // Count the session as successfully initialized.
356 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason", 345 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason",
357 SFT_SUCCESSFULLY_INITIALIZED, 346 SFT_SUCCESSFULLY_INITIALIZED, SFT_MAX + 1);
358 SFT_MAX + 1);
359 return true; 347 return true;
360 } 348 }
361 349
362 bool VTVideoDecodeAccelerator::FinishDelayedFrames() { 350 bool VTVideoDecodeAccelerator::FinishDelayedFrames() {
363 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 351 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
364 if (session_) { 352 if (session_) {
365 OSStatus status = VTDecompressionSessionWaitForAsynchronousFrames(session_); 353 OSStatus status = VTDecompressionSessionWaitForAsynchronousFrames(session_);
366 if (status) { 354 if (status) {
367 NOTIFY_STATUS("VTDecompressionSessionWaitForAsynchronousFrames()", 355 NOTIFY_STATUS("VTDecompressionSessionWaitForAsynchronousFrames()", status,
368 status, SFT_PLATFORM_ERROR); 356 SFT_PLATFORM_ERROR);
369 return false; 357 return false;
370 } 358 }
371 } 359 }
372 return true; 360 return true;
373 } 361 }
374 362
375 bool VTVideoDecodeAccelerator::ConfigureDecoder() { 363 bool VTVideoDecodeAccelerator::ConfigureDecoder() {
376 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 364 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
377 DCHECK(!last_sps_.empty()); 365 DCHECK(!last_sps_.empty());
378 DCHECK(!last_pps_.empty()); 366 DCHECK(!last_pps_.empty());
379 367
380 // Build the configuration records. 368 // Build the configuration records.
381 std::vector<const uint8_t*> nalu_data_ptrs; 369 std::vector<const uint8_t*> nalu_data_ptrs;
382 std::vector<size_t> nalu_data_sizes; 370 std::vector<size_t> nalu_data_sizes;
383 nalu_data_ptrs.reserve(3); 371 nalu_data_ptrs.reserve(3);
384 nalu_data_sizes.reserve(3); 372 nalu_data_sizes.reserve(3);
385 nalu_data_ptrs.push_back(&last_sps_.front()); 373 nalu_data_ptrs.push_back(&last_sps_.front());
386 nalu_data_sizes.push_back(last_sps_.size()); 374 nalu_data_sizes.push_back(last_sps_.size());
387 if (!last_spsext_.empty()) { 375 if (!last_spsext_.empty()) {
388 nalu_data_ptrs.push_back(&last_spsext_.front()); 376 nalu_data_ptrs.push_back(&last_spsext_.front());
389 nalu_data_sizes.push_back(last_spsext_.size()); 377 nalu_data_sizes.push_back(last_spsext_.size());
390 } 378 }
391 nalu_data_ptrs.push_back(&last_pps_.front()); 379 nalu_data_ptrs.push_back(&last_pps_.front());
392 nalu_data_sizes.push_back(last_pps_.size()); 380 nalu_data_sizes.push_back(last_pps_.size());
393 381
394 // Construct a new format description from the parameter sets. 382 // Construct a new format description from the parameter sets.
395 format_.reset(); 383 format_.reset();
396 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets( 384 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
397 kCFAllocatorDefault, 385 kCFAllocatorDefault,
398 nalu_data_ptrs.size(), // parameter_set_count 386 nalu_data_ptrs.size(), // parameter_set_count
399 &nalu_data_ptrs.front(), // &parameter_set_pointers 387 &nalu_data_ptrs.front(), // &parameter_set_pointers
400 &nalu_data_sizes.front(), // &parameter_set_sizes 388 &nalu_data_sizes.front(), // &parameter_set_sizes
401 kNALUHeaderLength, // nal_unit_header_length 389 kNALUHeaderLength, // nal_unit_header_length
402 format_.InitializeInto()); 390 format_.InitializeInto());
403 if (status) { 391 if (status) {
404 NOTIFY_STATUS("CMVideoFormatDescriptionCreateFromH264ParameterSets()", 392 NOTIFY_STATUS("CMVideoFormatDescriptionCreateFromH264ParameterSets()",
405 status, SFT_PLATFORM_ERROR); 393 status, SFT_PLATFORM_ERROR);
406 return false; 394 return false;
407 } 395 }
408 396
409 // Store the new configuration data. 397 // Store the new configuration data.
410 // TODO(sandersd): Despite the documentation, this seems to return the visible 398 // TODO(sandersd): Despite the documentation, this seems to return the visible
411 // size. However, the output always appears to be top-left aligned, so it 399 // size. However, the output always appears to be top-left aligned, so it
412 // makes no difference. Re-verify this and update the variable name. 400 // makes no difference. Re-verify this and update the variable name.
413 CMVideoDimensions coded_dimensions = 401 CMVideoDimensions coded_dimensions =
414 CMVideoFormatDescriptionGetDimensions(format_); 402 CMVideoFormatDescriptionGetDimensions(format_);
415 coded_size_.SetSize(coded_dimensions.width, coded_dimensions.height); 403 coded_size_.SetSize(coded_dimensions.width, coded_dimensions.height);
416 404
417 // Prepare VideoToolbox configuration dictionaries. 405 // Prepare VideoToolbox configuration dictionaries.
418 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config( 406 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config(
419 CFDictionaryCreateMutable( 407 CFDictionaryCreateMutable(kCFAllocatorDefault,
420 kCFAllocatorDefault, 408 1, // capacity
421 1, // capacity 409 &kCFTypeDictionaryKeyCallBacks,
422 &kCFTypeDictionaryKeyCallBacks, 410 &kCFTypeDictionaryValueCallBacks));
423 &kCFTypeDictionaryValueCallBacks));
424 if (!decoder_config.get()) { 411 if (!decoder_config.get()) {
425 DLOG(ERROR) << "Failed to create CFMutableDictionary"; 412 DLOG(ERROR) << "Failed to create CFMutableDictionary";
426 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 413 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
427 return false; 414 return false;
428 } 415 }
429 416
430 CFDictionarySetValue( 417 CFDictionarySetValue(
431 decoder_config, 418 decoder_config,
432 // kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder 419 // kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder
433 CFSTR("EnableHardwareAcceleratedVideoDecoder"), 420 CFSTR("EnableHardwareAcceleratedVideoDecoder"), kCFBooleanTrue);
434 kCFBooleanTrue);
435 421
436 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config( 422 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config(
437 BuildImageConfig(coded_dimensions)); 423 BuildImageConfig(coded_dimensions));
438 if (!image_config.get()) { 424 if (!image_config.get()) {
439 DLOG(ERROR) << "Failed to create decoder image configuration"; 425 DLOG(ERROR) << "Failed to create decoder image configuration";
440 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 426 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
441 return false; 427 return false;
442 } 428 }
443 429
444 // Ensure that the old decoder emits all frames before the new decoder can 430 // Ensure that the old decoder emits all frames before the new decoder can
445 // emit any. 431 // emit any.
446 if (!FinishDelayedFrames()) 432 if (!FinishDelayedFrames())
447 return false; 433 return false;
448 434
449 session_.reset(); 435 session_.reset();
450 status = VTDecompressionSessionCreate( 436 status = VTDecompressionSessionCreate(
451 kCFAllocatorDefault, 437 kCFAllocatorDefault,
452 format_, // video_format_description 438 format_, // video_format_description
453 decoder_config, // video_decoder_specification 439 decoder_config, // video_decoder_specification
454 image_config, // destination_image_buffer_attributes 440 image_config, // destination_image_buffer_attributes
455 &callback_, // output_callback 441 &callback_, // output_callback
456 session_.InitializeInto()); 442 session_.InitializeInto());
457 if (status) { 443 if (status) {
458 NOTIFY_STATUS("VTDecompressionSessionCreate()", status, 444 NOTIFY_STATUS("VTDecompressionSessionCreate()", status,
459 SFT_UNSUPPORTED_STREAM_PARAMETERS); 445 SFT_UNSUPPORTED_STREAM_PARAMETERS);
460 return false; 446 return false;
461 } 447 }
462 448
463 // Report whether hardware decode is being used. 449 // Report whether hardware decode is being used.
464 bool using_hardware = false; 450 bool using_hardware = false;
465 base::ScopedCFTypeRef<CFBooleanRef> cf_using_hardware; 451 base::ScopedCFTypeRef<CFBooleanRef> cf_using_hardware;
466 if (VTSessionCopyProperty( 452 if (VTSessionCopyProperty(
467 session_, 453 session_,
468 // kVTDecompressionPropertyKey_UsingHardwareAcceleratedVideoDecoder 454 // kVTDecompressionPropertyKey_UsingHardwareAcceleratedVideoDecoder
469 CFSTR("UsingHardwareAcceleratedVideoDecoder"), 455 CFSTR("UsingHardwareAcceleratedVideoDecoder"), kCFAllocatorDefault,
470 kCFAllocatorDefault,
471 cf_using_hardware.InitializeInto()) == 0) { 456 cf_using_hardware.InitializeInto()) == 0) {
472 using_hardware = CFBooleanGetValue(cf_using_hardware); 457 using_hardware = CFBooleanGetValue(cf_using_hardware);
473 } 458 }
474 UMA_HISTOGRAM_BOOLEAN("Media.VTVDA.HardwareAccelerated", using_hardware); 459 UMA_HISTOGRAM_BOOLEAN("Media.VTVDA.HardwareAccelerated", using_hardware);
475 460
476 return true; 461 return true;
477 } 462 }
478 463
479 void VTVideoDecodeAccelerator::DecodeTask( 464 void VTVideoDecodeAccelerator::DecodeTask(
480 const media::BitstreamBuffer& bitstream, 465 const media::BitstreamBuffer& bitstream,
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after
610 DLOG(ERROR) << "Unable to compute POC"; 595 DLOG(ERROR) << "Unable to compute POC";
611 NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM); 596 NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM);
612 return; 597 return;
613 } 598 }
614 599
615 if (nalu.nal_unit_type == media::H264NALU::kIDRSlice) 600 if (nalu.nal_unit_type == media::H264NALU::kIDRSlice)
616 frame->is_idr = true; 601 frame->is_idr = true;
617 602
618 if (sps->vui_parameters_present_flag && 603 if (sps->vui_parameters_present_flag &&
619 sps->bitstream_restriction_flag) { 604 sps->bitstream_restriction_flag) {
620 frame->reorder_window = std::min(sps->max_num_reorder_frames, 605 frame->reorder_window =
621 kMaxReorderQueueSize - 1); 606 std::min(sps->max_num_reorder_frames, kMaxReorderQueueSize - 1);
622 } 607 }
623 } 608 }
624 has_slice = true; 609 has_slice = true;
625 default: 610 default:
626 nalus.push_back(nalu); 611 nalus.push_back(nalu);
627 data_size += kNALUHeaderLength + nalu.size; 612 data_size += kNALUHeaderLength + nalu.size;
628 break; 613 break;
629 } 614 }
630 } 615 }
631 616
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
670 } 655 }
671 has_slice = false; 656 has_slice = false;
672 } 657 }
673 658
674 // If there is nothing to decode, drop the bitstream buffer by returning an 659 // If there is nothing to decode, drop the bitstream buffer by returning an
675 // empty frame. 660 // empty frame.
676 if (!has_slice) { 661 if (!has_slice) {
677 // Keep everything in order by flushing first. 662 // Keep everything in order by flushing first.
678 if (!FinishDelayedFrames()) 663 if (!FinishDelayedFrames())
679 return; 664 return;
680 gpu_task_runner_->PostTask(FROM_HERE, base::Bind( 665 gpu_task_runner_->PostTask(
681 &VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame)); 666 FROM_HERE,
667 base::Bind(&VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame));
682 return; 668 return;
683 } 669 }
684 670
685 // If the session is not configured by this point, fail. 671 // If the session is not configured by this point, fail.
686 if (!session_) { 672 if (!session_) {
687 DLOG(ERROR) << "Cannot decode without configuration"; 673 DLOG(ERROR) << "Cannot decode without configuration";
688 NotifyError(INVALID_ARGUMENT, SFT_INVALID_STREAM); 674 NotifyError(INVALID_ARGUMENT, SFT_INVALID_STREAM);
689 return; 675 return;
690 } 676 }
691 677
(...skipping 27 matching lines...) Expand all
719 NOTIFY_STATUS("CMBlockBufferAssureBlockMemory()", status, 705 NOTIFY_STATUS("CMBlockBufferAssureBlockMemory()", status,
720 SFT_PLATFORM_ERROR); 706 SFT_PLATFORM_ERROR);
721 return; 707 return;
722 } 708 }
723 709
724 // Copy NALU data into the CMBlockBuffer, inserting length headers. 710 // Copy NALU data into the CMBlockBuffer, inserting length headers.
725 size_t offset = 0; 711 size_t offset = 0;
726 for (size_t i = 0; i < nalus.size(); i++) { 712 for (size_t i = 0; i < nalus.size(); i++) {
727 media::H264NALU& nalu = nalus[i]; 713 media::H264NALU& nalu = nalus[i];
728 uint32_t header = base::HostToNet32(static_cast<uint32_t>(nalu.size)); 714 uint32_t header = base::HostToNet32(static_cast<uint32_t>(nalu.size));
729 status = CMBlockBufferReplaceDataBytes( 715 status =
730 &header, data, offset, kNALUHeaderLength); 716 CMBlockBufferReplaceDataBytes(&header, data, offset, kNALUHeaderLength);
731 if (status) { 717 if (status) {
732 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status, 718 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status,
733 SFT_PLATFORM_ERROR); 719 SFT_PLATFORM_ERROR);
734 return; 720 return;
735 } 721 }
736 offset += kNALUHeaderLength; 722 offset += kNALUHeaderLength;
737 status = CMBlockBufferReplaceDataBytes(nalu.data, data, offset, nalu.size); 723 status = CMBlockBufferReplaceDataBytes(nalu.data, data, offset, nalu.size);
738 if (status) { 724 if (status) {
739 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status, 725 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status,
740 SFT_PLATFORM_ERROR); 726 SFT_PLATFORM_ERROR);
741 return; 727 return;
742 } 728 }
743 offset += nalu.size; 729 offset += nalu.size;
744 } 730 }
745 731
746 // Package the data in a CMSampleBuffer. 732 // Package the data in a CMSampleBuffer.
747 base::ScopedCFTypeRef<CMSampleBufferRef> sample; 733 base::ScopedCFTypeRef<CMSampleBufferRef> sample;
748 status = CMSampleBufferCreate( 734 status = CMSampleBufferCreate(kCFAllocatorDefault,
749 kCFAllocatorDefault, 735 data, // data_buffer
750 data, // data_buffer 736 true, // data_ready
751 true, // data_ready 737 nullptr, // make_data_ready_callback
752 nullptr, // make_data_ready_callback 738 nullptr, // make_data_ready_refcon
753 nullptr, // make_data_ready_refcon 739 format_, // format_description
754 format_, // format_description 740 1, // num_samples
755 1, // num_samples 741 0, // num_sample_timing_entries
756 0, // num_sample_timing_entries 742 nullptr, // &sample_timing_array
757 nullptr, // &sample_timing_array 743 1, // num_sample_size_entries
758 1, // num_sample_size_entries 744 &data_size, // &sample_size_array
759 &data_size, // &sample_size_array 745 sample.InitializeInto());
760 sample.InitializeInto());
761 if (status) { 746 if (status) {
762 NOTIFY_STATUS("CMSampleBufferCreate()", status, SFT_PLATFORM_ERROR); 747 NOTIFY_STATUS("CMSampleBufferCreate()", status, SFT_PLATFORM_ERROR);
763 return; 748 return;
764 } 749 }
765 750
766 // Send the frame for decoding. 751 // Send the frame for decoding.
767 // Asynchronous Decompression allows for parallel submission of frames 752 // Asynchronous Decompression allows for parallel submission of frames
768 // (without it, DecodeFrame() does not return until the frame has been 753 // (without it, DecodeFrame() does not return until the frame has been
769 // decoded). We don't enable Temporal Processing so that frames are always 754 // decoded). We don't enable Temporal Processing so that frames are always
770 // returned in decode order; this makes it easier to avoid deadlock. 755 // returned in decode order; this makes it easier to avoid deadlock.
771 VTDecodeFrameFlags decode_flags = 756 VTDecodeFrameFlags decode_flags =
772 kVTDecodeFrame_EnableAsynchronousDecompression; 757 kVTDecodeFrame_EnableAsynchronousDecompression;
773 status = VTDecompressionSessionDecodeFrame( 758 status = VTDecompressionSessionDecodeFrame(
774 session_, 759 session_,
775 sample, // sample_buffer 760 sample, // sample_buffer
776 decode_flags, // decode_flags 761 decode_flags, // decode_flags
777 reinterpret_cast<void*>(frame), // source_frame_refcon 762 reinterpret_cast<void*>(frame), // source_frame_refcon
778 nullptr); // &info_flags_out 763 nullptr); // &info_flags_out
779 if (status) { 764 if (status) {
780 NOTIFY_STATUS("VTDecompressionSessionDecodeFrame()", status, 765 NOTIFY_STATUS("VTDecompressionSessionDecodeFrame()", status,
781 SFT_DECODE_ERROR); 766 SFT_DECODE_ERROR);
782 return; 767 return;
783 } 768 }
784 } 769 }
785 770
786 // This method may be called on any VideoToolbox thread. 771 // This method may be called on any VideoToolbox thread.
787 void VTVideoDecodeAccelerator::Output( 772 void VTVideoDecodeAccelerator::Output(void* source_frame_refcon,
788 void* source_frame_refcon, 773 OSStatus status,
789 OSStatus status, 774 CVImageBufferRef image_buffer) {
790 CVImageBufferRef image_buffer) {
791 if (status) { 775 if (status) {
792 NOTIFY_STATUS("Decoding", status, SFT_DECODE_ERROR); 776 NOTIFY_STATUS("Decoding", status, SFT_DECODE_ERROR);
793 return; 777 return;
794 } 778 }
795 779
796 // The type of |image_buffer| is CVImageBuffer, but we only handle 780 // The type of |image_buffer| is CVImageBuffer, but we only handle
797 // CVPixelBuffers. This should be guaranteed as we set 781 // CVPixelBuffers. This should be guaranteed as we set
798 // kCVPixelBufferOpenGLCompatibilityKey in |image_config|. 782 // kCVPixelBufferOpenGLCompatibilityKey in |image_config|.
799 // 783 //
800 // Sometimes, for unknown reasons (http://crbug.com/453050), |image_buffer| is 784 // Sometimes, for unknown reasons (http://crbug.com/453050), |image_buffer| is
801 // NULL, which causes CFGetTypeID() to crash. While the rest of the code would 785 // NULL, which causes CFGetTypeID() to crash. While the rest of the code would
802 // smoothly handle NULL as a dropped frame, we choose to fail permanantly here 786 // smoothly handle NULL as a dropped frame, we choose to fail permanantly here
803 // until the issue is better understood. 787 // until the issue is better understood.
804 if (!image_buffer || CFGetTypeID(image_buffer) != CVPixelBufferGetTypeID()) { 788 if (!image_buffer || CFGetTypeID(image_buffer) != CVPixelBufferGetTypeID()) {
805 DLOG(ERROR) << "Decoded frame is not a CVPixelBuffer"; 789 DLOG(ERROR) << "Decoded frame is not a CVPixelBuffer";
806 NotifyError(PLATFORM_FAILURE, SFT_DECODE_ERROR); 790 NotifyError(PLATFORM_FAILURE, SFT_DECODE_ERROR);
807 return; 791 return;
808 } 792 }
809 793
810 Frame* frame = reinterpret_cast<Frame*>(source_frame_refcon); 794 Frame* frame = reinterpret_cast<Frame*>(source_frame_refcon);
811 frame->image.reset(image_buffer, base::scoped_policy::RETAIN); 795 frame->image.reset(image_buffer, base::scoped_policy::RETAIN);
812 gpu_task_runner_->PostTask(FROM_HERE, base::Bind( 796 gpu_task_runner_->PostTask(
813 &VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame)); 797 FROM_HERE,
798 base::Bind(&VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame));
814 } 799 }
815 800
816 void VTVideoDecodeAccelerator::DecodeDone(Frame* frame) { 801 void VTVideoDecodeAccelerator::DecodeDone(Frame* frame) {
817 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 802 DCHECK(gpu_thread_checker_.CalledOnValidThread());
818 DCHECK_EQ(1u, pending_frames_.count(frame->bitstream_id)); 803 DCHECK_EQ(1u, pending_frames_.count(frame->bitstream_id));
819 Task task(TASK_FRAME); 804 Task task(TASK_FRAME);
820 task.frame = pending_frames_[frame->bitstream_id]; 805 task.frame = pending_frames_[frame->bitstream_id];
821 pending_frames_.erase(frame->bitstream_id); 806 pending_frames_.erase(frame->bitstream_id);
822 task_queue_.push(task); 807 task_queue_.push(task);
823 ProcessWorkQueues(); 808 ProcessWorkQueues();
824 } 809 }
825 810
826 void VTVideoDecodeAccelerator::FlushTask(TaskType type) { 811 void VTVideoDecodeAccelerator::FlushTask(TaskType type) {
827 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 812 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
828 FinishDelayedFrames(); 813 FinishDelayedFrames();
829 814
830 // Always queue a task, even if FinishDelayedFrames() fails, so that 815 // Always queue a task, even if FinishDelayedFrames() fails, so that
831 // destruction always completes. 816 // destruction always completes.
832 gpu_task_runner_->PostTask(FROM_HERE, base::Bind( 817 gpu_task_runner_->PostTask(
833 &VTVideoDecodeAccelerator::FlushDone, weak_this_, type)); 818 FROM_HERE,
819 base::Bind(&VTVideoDecodeAccelerator::FlushDone, weak_this_, type));
834 } 820 }
835 821
836 void VTVideoDecodeAccelerator::FlushDone(TaskType type) { 822 void VTVideoDecodeAccelerator::FlushDone(TaskType type) {
837 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 823 DCHECK(gpu_thread_checker_.CalledOnValidThread());
838 task_queue_.push(Task(type)); 824 task_queue_.push(Task(type));
839 ProcessWorkQueues(); 825 ProcessWorkQueues();
840 } 826 }
841 827
842 void VTVideoDecodeAccelerator::Decode(const media::BitstreamBuffer& bitstream) { 828 void VTVideoDecodeAccelerator::Decode(const media::BitstreamBuffer& bitstream) {
843 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 829 DCHECK(gpu_thread_checker_.CalledOnValidThread());
(...skipping 25 matching lines...) Expand all
869 DCHECK_LE(1u, picture.texture_ids().size()); 855 DCHECK_LE(1u, picture.texture_ids().size());
870 picture_info_map_.insert(std::make_pair( 856 picture_info_map_.insert(std::make_pair(
871 picture.id(), 857 picture.id(),
872 base::WrapUnique(new PictureInfo(picture.internal_texture_ids()[0], 858 base::WrapUnique(new PictureInfo(picture.internal_texture_ids()[0],
873 picture.texture_ids()[0])))); 859 picture.texture_ids()[0]))));
874 } 860 }
875 861
876 // Pictures are not marked as uncleared until after this method returns, and 862 // Pictures are not marked as uncleared until after this method returns, and
877 // they will be broken if they are used before that happens. So, schedule 863 // they will be broken if they are used before that happens. So, schedule
878 // future work after that happens. 864 // future work after that happens.
879 gpu_task_runner_->PostTask(FROM_HERE, base::Bind( 865 gpu_task_runner_->PostTask(
880 &VTVideoDecodeAccelerator::ProcessWorkQueues, weak_this_)); 866 FROM_HERE,
867 base::Bind(&VTVideoDecodeAccelerator::ProcessWorkQueues, weak_this_));
881 } 868 }
882 869
883 void VTVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_id) { 870 void VTVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_id) {
884 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 871 DCHECK(gpu_thread_checker_.CalledOnValidThread());
885 DCHECK(picture_info_map_.count(picture_id)); 872 DCHECK(picture_info_map_.count(picture_id));
886 PictureInfo* picture_info = picture_info_map_.find(picture_id)->second.get(); 873 PictureInfo* picture_info = picture_info_map_.find(picture_id)->second.get();
887 picture_info->cv_image.reset(); 874 picture_info->cv_image.reset();
888 picture_info->gl_image->Destroy(false); 875 picture_info->gl_image->Destroy(false);
889 picture_info->gl_image = nullptr; 876 picture_info->gl_image = nullptr;
890 877
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
975 bool VTVideoDecodeAccelerator::ProcessReorderQueue() { 962 bool VTVideoDecodeAccelerator::ProcessReorderQueue() {
976 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 963 DCHECK(gpu_thread_checker_.CalledOnValidThread());
977 DCHECK_EQ(state_, STATE_DECODING); 964 DCHECK_EQ(state_, STATE_DECODING);
978 965
979 if (reorder_queue_.empty()) 966 if (reorder_queue_.empty())
980 return false; 967 return false;
981 968
982 // If the next task is a flush (because there is a pending flush or becuase 969 // If the next task is a flush (because there is a pending flush or becuase
983 // the next frame is an IDR), then we don't need a full reorder buffer to send 970 // the next frame is an IDR), then we don't need a full reorder buffer to send
984 // the next frame. 971 // the next frame.
985 bool flushing = !task_queue_.empty() && 972 bool flushing =
986 (task_queue_.front().type != TASK_FRAME || 973 !task_queue_.empty() && (task_queue_.front().type != TASK_FRAME ||
987 task_queue_.front().frame->is_idr); 974 task_queue_.front().frame->is_idr);
988 975
989 size_t reorder_window = std::max(0, reorder_queue_.top()->reorder_window); 976 size_t reorder_window = std::max(0, reorder_queue_.top()->reorder_window);
990 if (flushing || reorder_queue_.size() > reorder_window) { 977 if (flushing || reorder_queue_.size() > reorder_window) {
991 if (ProcessFrame(*reorder_queue_.top())) { 978 if (ProcessFrame(*reorder_queue_.top())) {
992 reorder_queue_.pop(); 979 reorder_queue_.pop();
993 return true; 980 return true;
994 } 981 }
995 } 982 }
996 983
997 return false; 984 return false;
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
1050 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 1037 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
1051 return false; 1038 return false;
1052 } 1039 }
1053 1040
1054 scoped_refptr<gl::GLImageIOSurface> gl_image( 1041 scoped_refptr<gl::GLImageIOSurface> gl_image(
1055 new gl::GLImageIOSurface(frame.coded_size, GL_BGRA_EXT)); 1042 new gl::GLImageIOSurface(frame.coded_size, GL_BGRA_EXT));
1056 if (!gl_image->InitializeWithCVPixelBuffer( 1043 if (!gl_image->InitializeWithCVPixelBuffer(
1057 frame.image.get(), gfx::GenericSharedMemoryId(), 1044 frame.image.get(), gfx::GenericSharedMemoryId(),
1058 gfx::BufferFormat::YUV_420_BIPLANAR)) { 1045 gfx::BufferFormat::YUV_420_BIPLANAR)) {
1059 NOTIFY_STATUS("Failed to initialize GLImageIOSurface", PLATFORM_FAILURE, 1046 NOTIFY_STATUS("Failed to initialize GLImageIOSurface", PLATFORM_FAILURE,
1060 SFT_PLATFORM_ERROR); 1047 SFT_PLATFORM_ERROR);
1061 } 1048 }
1062 1049
1063 if (!bind_image_cb_.Run(picture_info->client_texture_id, 1050 if (!bind_image_cb_.Run(picture_info->client_texture_id,
1064 GL_TEXTURE_RECTANGLE_ARB, gl_image, false)) { 1051 GL_TEXTURE_RECTANGLE_ARB, gl_image, false)) {
1065 DLOG(ERROR) << "Failed to bind image"; 1052 DLOG(ERROR) << "Failed to bind image";
1066 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 1053 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
1067 return false; 1054 return false;
1068 } 1055 }
1069 1056
1070 // Assign the new image(s) to the the picture info. 1057 // Assign the new image(s) to the the picture info.
1071 picture_info->gl_image = gl_image; 1058 picture_info->gl_image = gl_image;
1072 picture_info->cv_image = frame.image; 1059 picture_info->cv_image = frame.image;
1073 available_picture_ids_.pop_back(); 1060 available_picture_ids_.pop_back();
1074 1061
1075 // TODO(sandersd): Currently, the size got from 1062 // TODO(sandersd): Currently, the size got from
1076 // CMVideoFormatDescriptionGetDimensions is visible size. We pass it to 1063 // CMVideoFormatDescriptionGetDimensions is visible size. We pass it to
1077 // GpuVideoDecoder so that GpuVideoDecoder can use correct visible size in 1064 // GpuVideoDecoder so that GpuVideoDecoder can use correct visible size in
1078 // resolution changed. We should find the correct API to get the real 1065 // resolution changed. We should find the correct API to get the real
1079 // coded size and fix it. 1066 // coded size and fix it.
1080 client_->PictureReady(media::Picture(picture_id, frame.bitstream_id, 1067 client_->PictureReady(media::Picture(picture_id, frame.bitstream_id,
1081 gfx::Rect(frame.coded_size), 1068 gfx::Rect(frame.coded_size), true));
1082 true));
1083 return true; 1069 return true;
1084 } 1070 }
1085 1071
1086 void VTVideoDecodeAccelerator::NotifyError( 1072 void VTVideoDecodeAccelerator::NotifyError(
1087 Error vda_error_type, 1073 Error vda_error_type,
1088 VTVDASessionFailureType session_failure_type) { 1074 VTVDASessionFailureType session_failure_type) {
1089 DCHECK_LT(session_failure_type, SFT_MAX + 1); 1075 DCHECK_LT(session_failure_type, SFT_MAX + 1);
1090 if (!gpu_thread_checker_.CalledOnValidThread()) { 1076 if (!gpu_thread_checker_.CalledOnValidThread()) {
1091 gpu_task_runner_->PostTask(FROM_HERE, base::Bind( 1077 gpu_task_runner_->PostTask(
1092 &VTVideoDecodeAccelerator::NotifyError, weak_this_, vda_error_type, 1078 FROM_HERE,
1093 session_failure_type)); 1079 base::Bind(&VTVideoDecodeAccelerator::NotifyError, weak_this_,
1080 vda_error_type, session_failure_type));
1094 } else if (state_ == STATE_DECODING) { 1081 } else if (state_ == STATE_DECODING) {
1095 state_ = STATE_ERROR; 1082 state_ = STATE_ERROR;
1096 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason", 1083 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason",
1097 session_failure_type, 1084 session_failure_type, SFT_MAX + 1);
1098 SFT_MAX + 1);
1099 client_->NotifyError(vda_error_type); 1085 client_->NotifyError(vda_error_type);
1100 } 1086 }
1101 } 1087 }
1102 1088
1103 void VTVideoDecodeAccelerator::QueueFlush(TaskType type) { 1089 void VTVideoDecodeAccelerator::QueueFlush(TaskType type) {
1104 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 1090 DCHECK(gpu_thread_checker_.CalledOnValidThread());
1105 pending_flush_tasks_.push(type); 1091 pending_flush_tasks_.push(type);
1106 decoder_thread_.task_runner()->PostTask( 1092 decoder_thread_.task_runner()->PostTask(
1107 FROM_HERE, base::Bind(&VTVideoDecodeAccelerator::FlushTask, 1093 FROM_HERE, base::Bind(&VTVideoDecodeAccelerator::FlushTask,
1108 base::Unretained(this), type)); 1094 base::Unretained(this), type));
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
1155 for (const auto& supported_profile : kSupportedProfiles) { 1141 for (const auto& supported_profile : kSupportedProfiles) {
1156 SupportedProfile profile; 1142 SupportedProfile profile;
1157 profile.profile = supported_profile; 1143 profile.profile = supported_profile;
1158 profile.min_resolution.SetSize(16, 16); 1144 profile.min_resolution.SetSize(16, 16);
1159 profile.max_resolution.SetSize(4096, 2160); 1145 profile.max_resolution.SetSize(4096, 2160);
1160 profiles.push_back(profile); 1146 profiles.push_back(profile);
1161 } 1147 }
1162 return profiles; 1148 return profiles;
1163 } 1149 }
1164 1150
1165 } // namespace content 1151 } // namespace media
OLDNEW
« no previous file with comments | « media/gpu/vt_video_decode_accelerator_mac.h ('k') | media/gpu/vt_video_encode_accelerator_mac.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698