Chromium Code Reviews

Side by Side Diff: media/gpu/vt_video_decode_accelerator_mac.cc

Issue 1882373004: Migrate content/common/gpu/media code to media/gpu (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fix several more bot-identified build issues Created 4 years, 8 months ago
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/common/gpu/media/vt_video_decode_accelerator_mac.h" 5 #include "media/gpu/vt_video_decode_accelerator_mac.h"
6 6
7 #include <CoreVideo/CoreVideo.h> 7 #include <CoreVideo/CoreVideo.h>
8 #include <OpenGL/CGLIOSurface.h> 8 #include <OpenGL/CGLIOSurface.h>
9 #include <OpenGL/gl.h> 9 #include <OpenGL/gl.h>
10 #include <stddef.h> 10 #include <stddef.h>
11 11
12 #include <algorithm> 12 #include <algorithm>
13 13
14 #include "base/bind.h" 14 #include "base/bind.h"
15 #include "base/logging.h" 15 #include "base/logging.h"
16 #include "base/mac/mac_logging.h" 16 #include "base/mac/mac_logging.h"
17 #include "base/macros.h" 17 #include "base/macros.h"
18 #include "base/memory/ptr_util.h" 18 #include "base/memory/ptr_util.h"
19 #include "base/metrics/histogram_macros.h" 19 #include "base/metrics/histogram_macros.h"
20 #include "base/sys_byteorder.h" 20 #include "base/sys_byteorder.h"
21 #include "base/sys_info.h" 21 #include "base/sys_info.h"
22 #include "base/thread_task_runner_handle.h" 22 #include "base/thread_task_runner_handle.h"
23 #include "base/version.h" 23 #include "base/version.h"
24 #include "media/base/limits.h" 24 #include "media/base/limits.h"
25 #include "ui/gl/gl_context.h" 25 #include "ui/gl/gl_context.h"
26 #include "ui/gl/gl_image_io_surface.h" 26 #include "ui/gl/gl_image_io_surface.h"
27 #include "ui/gl/gl_implementation.h" 27 #include "ui/gl/gl_implementation.h"
28 #include "ui/gl/scoped_binders.h" 28 #include "ui/gl/scoped_binders.h"
29 29
-using content_common_gpu_media::kModuleVt;
-using content_common_gpu_media::InitializeStubs;
-using content_common_gpu_media::IsVtInitialized;
-using content_common_gpu_media::StubPathMap;
+using media_gpu::kModuleVt;
+using media_gpu::InitializeStubs;
+using media_gpu::IsVtInitialized;
+using media_gpu::StubPathMap;
34 34
35 #define NOTIFY_STATUS(name, status, session_failure) \ 35 #define NOTIFY_STATUS(name, status, session_failure) \
36 do { \ 36 do { \
37 OSSTATUS_DLOG(ERROR, status) << name; \ 37 OSSTATUS_DLOG(ERROR, status) << name; \
38 NotifyError(PLATFORM_FAILURE, session_failure); \ 38 NotifyError(PLATFORM_FAILURE, session_failure); \
39 } while (0) 39 } while (0)
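For context, the do { ... } while (0) wrapper above is the standard way to make a multi-statement macro expand to a single statement so it composes safely with if/else at call sites. A minimal illustration, not part of the patch (REPORT_AND_FAIL is a made-up name; DLOG comes from base/logging.h, which this file already includes):

    // Without the do/while(0) wrapper, only the first statement would be
    // guarded by the if below, and the trailing ';' would break if/else chains.
    #define REPORT_AND_FAIL(msg) \
      do {                       \
        DLOG(ERROR) << (msg);    \
        return false;            \
      } while (0)

    bool CheckedStep(bool ok) {
      if (!ok)
        REPORT_AND_FAIL("step failed");  // expands to exactly one statement
      return true;
    }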
40 40
-namespace content {
+namespace media {
42 42
43 // Only H.264 with 4:2:0 chroma sampling is supported. 43 // Only H.264 with 4:2:0 chroma sampling is supported.
44 static const media::VideoCodecProfile kSupportedProfiles[] = { 44 static const media::VideoCodecProfile kSupportedProfiles[] = {
-    media::H264PROFILE_BASELINE,
-    media::H264PROFILE_MAIN,
-    media::H264PROFILE_EXTENDED,
-    media::H264PROFILE_HIGH,
-    // TODO(hubbe): Try to re-enable this again somehow. Currently it seems
-    // that some codecs fail to check the profile during initialization and
-    // then fail on the first frame decode, which currently results in a
-    // pipeline failure.
-    // media::H264PROFILE_HIGH10PROFILE,
-    media::H264PROFILE_SCALABLEBASELINE,
-    media::H264PROFILE_SCALABLEHIGH,
-    media::H264PROFILE_STEREOHIGH,
-    media::H264PROFILE_MULTIVIEWHIGH,
+    media::H264PROFILE_BASELINE, media::H264PROFILE_MAIN,
+    media::H264PROFILE_EXTENDED, media::H264PROFILE_HIGH,
+    // TODO(hubbe): Try to re-enable this again somehow. Currently it seems
+    // that some codecs fail to check the profile during initialization and
+    // then fail on the first frame decode, which currently results in a
+    // pipeline failure.
+    // media::H264PROFILE_HIGH10PROFILE,
+    media::H264PROFILE_SCALABLEBASELINE, media::H264PROFILE_SCALABLEHIGH,
+    media::H264PROFILE_STEREOHIGH, media::H264PROFILE_MULTIVIEWHIGH,
58 }; 54 };
59 55
60 // Size to use for NALU length headers in AVC format (can be 1, 2, or 4). 56 // Size to use for NALU length headers in AVC format (can be 1, 2, or 4).
61 static const int kNALUHeaderLength = 4; 57 static const int kNALUHeaderLength = 4;
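kNALUHeaderLength is the size of the big-endian length field that replaces Annex B start codes when the bitstream is repackaged in AVC/AVCC form (see the CMBlockBuffer copy loop further down). A rough sketch of that framing on a plain byte buffer, purely illustrative (PackNALU is a hypothetical helper, not part of this file):

    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Prepend a 4-byte big-endian length header to a single NALU payload.
    std::vector<uint8_t> PackNALU(const uint8_t* nalu, size_t size) {
      std::vector<uint8_t> out(4 + size);
      const uint32_t len = static_cast<uint32_t>(size);
      out[0] = (len >> 24) & 0xff;  // equivalent to base::HostToNet32(len)
      out[1] = (len >> 16) & 0xff;
      out[2] = (len >> 8) & 0xff;
      out[3] = len & 0xff;
      std::memcpy(out.data() + 4, nalu, size);
      return out;
    }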
62 58
63 // We request 5 picture buffers from the client, each of which has a texture ID 59 // We request 5 picture buffers from the client, each of which has a texture ID
64 // that we can bind decoded frames to. We need enough to satisfy preroll, and 60 // that we can bind decoded frames to. We need enough to satisfy preroll, and
65 // enough to avoid unnecessary stalling, but no more than that. The resource 61 // enough to avoid unnecessary stalling, but no more than that. The resource
66 // requirements are low, as we don't need the textures to be backed by storage. 62 // requirements are low, as we don't need the textures to be backed by storage.
67 static const int kNumPictureBuffers = media::limits::kMaxVideoFrames + 1; 63 static const int kNumPictureBuffers = media::limits::kMaxVideoFrames + 1;
68 64
69 // Maximum number of frames to queue for reordering before we stop asking for 65 // Maximum number of frames to queue for reordering before we stop asking for
70 // more. (NotifyEndOfBitstreamBuffer() is called when frames are moved into the 66 // more. (NotifyEndOfBitstreamBuffer() is called when frames are moved into the
71 // reorder queue.) 67 // reorder queue.)
72 static const int kMaxReorderQueueSize = 16; 68 static const int kMaxReorderQueueSize = 16;
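This constant caps the reorder queue used later in ProcessReorderQueue(): decoded frames are buffered until either a flush forces output or the queue is deeper than the stream's declared reorder window, then emitted in pic_order_cnt order. The gating condition amounts to roughly the following (simplified sketch of the logic in this file, expressed as a hypothetical free function rather than the member code):

    // Emit the earliest frame (by pic_order_cnt) once enough frames are
    // buffered to cover the reorder window, or immediately when flushing.
    bool ShouldEmitNextFrame(size_t reorder_queue_size,
                             size_t reorder_window,
                             bool flushing) {
      return flushing || reorder_queue_size > reorder_window;
    }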
73 69
74 // Build an |image_config| dictionary for VideoToolbox initialization. 70 // Build an |image_config| dictionary for VideoToolbox initialization.
-static base::ScopedCFTypeRef<CFMutableDictionaryRef>
-BuildImageConfig(CMVideoDimensions coded_dimensions) {
+static base::ScopedCFTypeRef<CFMutableDictionaryRef> BuildImageConfig(
+    CMVideoDimensions coded_dimensions) {
77 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config; 73 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config;
78 74
79 // Note that 4:2:0 textures cannot be used directly as RGBA in OpenGL, but are 75 // Note that 4:2:0 textures cannot be used directly as RGBA in OpenGL, but are
80 // lower power than 4:2:2 when composited directly by CoreAnimation. 76 // lower power than 4:2:2 when composited directly by CoreAnimation.
81 int32_t pixel_format = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; 77 int32_t pixel_format = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
82 #define CFINT(i) CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &i) 78 #define CFINT(i) CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &i)
83 base::ScopedCFTypeRef<CFNumberRef> cf_pixel_format(CFINT(pixel_format)); 79 base::ScopedCFTypeRef<CFNumberRef> cf_pixel_format(CFINT(pixel_format));
84 base::ScopedCFTypeRef<CFNumberRef> cf_width(CFINT(coded_dimensions.width)); 80 base::ScopedCFTypeRef<CFNumberRef> cf_width(CFINT(coded_dimensions.width));
85 base::ScopedCFTypeRef<CFNumberRef> cf_height(CFINT(coded_dimensions.height)); 81 base::ScopedCFTypeRef<CFNumberRef> cf_height(CFINT(coded_dimensions.height));
86 #undef CFINT 82 #undef CFINT
87 if (!cf_pixel_format.get() || !cf_width.get() || !cf_height.get()) 83 if (!cf_pixel_format.get() || !cf_width.get() || !cf_height.get())
88 return image_config; 84 return image_config;
89 85
-  image_config.reset(
-      CFDictionaryCreateMutable(
-          kCFAllocatorDefault,
-          3,  // capacity
-          &kCFTypeDictionaryKeyCallBacks,
-          &kCFTypeDictionaryValueCallBacks));
+  image_config.reset(CFDictionaryCreateMutable(
+      kCFAllocatorDefault,
+      3,  // capacity
+      &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
96 if (!image_config.get()) 90 if (!image_config.get())
97 return image_config; 91 return image_config;
98 92
99 CFDictionarySetValue(image_config, kCVPixelBufferPixelFormatTypeKey, 93 CFDictionarySetValue(image_config, kCVPixelBufferPixelFormatTypeKey,
100 cf_pixel_format); 94 cf_pixel_format);
101 CFDictionarySetValue(image_config, kCVPixelBufferWidthKey, cf_width); 95 CFDictionarySetValue(image_config, kCVPixelBufferWidthKey, cf_width);
102 CFDictionarySetValue(image_config, kCVPixelBufferHeightKey, cf_height); 96 CFDictionarySetValue(image_config, kCVPixelBufferHeightKey, cf_height);
103 97
104 return image_config; 98 return image_config;
105 } 99 }
106 100
107 // Create a VTDecompressionSession using the provided |pps| and |sps|. If 101 // Create a VTDecompressionSession using the provided |pps| and |sps|. If
108 // |require_hardware| is true, the session must uses real hardware decoding 102 // |require_hardware| is true, the session must uses real hardware decoding
109 // (as opposed to software decoding inside of VideoToolbox) to be considered 103 // (as opposed to software decoding inside of VideoToolbox) to be considered
110 // successful. 104 // successful.
111 // 105 //
112 // TODO(sandersd): Merge with ConfigureDecoder(), as the code is very similar. 106 // TODO(sandersd): Merge with ConfigureDecoder(), as the code is very similar.
-static bool CreateVideoToolboxSession(const uint8_t* sps, size_t sps_size,
-                                      const uint8_t* pps, size_t pps_size,
-                                      bool require_hardware) {
+static bool CreateVideoToolboxSession(const uint8_t* sps,
+                                      size_t sps_size,
+                                      const uint8_t* pps,
+                                      size_t pps_size,
+                                      bool require_hardware) {
116 const uint8_t* data_ptrs[] = {sps, pps}; 112 const uint8_t* data_ptrs[] = {sps, pps};
117 const size_t data_sizes[] = {sps_size, pps_size}; 113 const size_t data_sizes[] = {sps_size, pps_size};
118 114
119 base::ScopedCFTypeRef<CMFormatDescriptionRef> format; 115 base::ScopedCFTypeRef<CMFormatDescriptionRef> format;
120 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets( 116 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
121 kCFAllocatorDefault, 117 kCFAllocatorDefault,
122 2, // parameter_set_count 118 2, // parameter_set_count
123 data_ptrs, // &parameter_set_pointers 119 data_ptrs, // &parameter_set_pointers
124 data_sizes, // &parameter_set_sizes 120 data_sizes, // &parameter_set_sizes
125 kNALUHeaderLength, // nal_unit_header_length 121 kNALUHeaderLength, // nal_unit_header_length
126 format.InitializeInto()); 122 format.InitializeInto());
127 if (status) { 123 if (status) {
128 OSSTATUS_DLOG(WARNING, status) 124 OSSTATUS_DLOG(WARNING, status)
129 << "Failed to create CMVideoFormatDescription"; 125 << "Failed to create CMVideoFormatDescription";
130 return false; 126 return false;
131 } 127 }
132 128
133 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config( 129 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config(
-      CFDictionaryCreateMutable(
-          kCFAllocatorDefault,
-          1,  // capacity
-          &kCFTypeDictionaryKeyCallBacks,
-          &kCFTypeDictionaryValueCallBacks));
+      CFDictionaryCreateMutable(kCFAllocatorDefault,
+                                1,  // capacity
+                                &kCFTypeDictionaryKeyCallBacks,
+                                &kCFTypeDictionaryValueCallBacks));
139 if (!decoder_config.get()) 134 if (!decoder_config.get())
140 return false; 135 return false;
141 136
142 if (require_hardware) { 137 if (require_hardware) {
143 CFDictionarySetValue( 138 CFDictionarySetValue(
144 decoder_config, 139 decoder_config,
145 // kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder 140 // kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder
146 CFSTR("RequireHardwareAcceleratedVideoDecoder"), 141 CFSTR("RequireHardwareAcceleratedVideoDecoder"), kCFBooleanTrue);
147 kCFBooleanTrue);
148 } 142 }
149 143
150 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config( 144 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config(
151 BuildImageConfig(CMVideoFormatDescriptionGetDimensions(format))); 145 BuildImageConfig(CMVideoFormatDescriptionGetDimensions(format)));
152 if (!image_config.get()) 146 if (!image_config.get())
153 return false; 147 return false;
154 148
155 VTDecompressionOutputCallbackRecord callback = {0}; 149 VTDecompressionOutputCallbackRecord callback = {0};
156 150
157 base::ScopedCFTypeRef<VTDecompressionSessionRef> session; 151 base::ScopedCFTypeRef<VTDecompressionSessionRef> session;
158 status = VTDecompressionSessionCreate( 152 status = VTDecompressionSessionCreate(
159 kCFAllocatorDefault, 153 kCFAllocatorDefault,
160 format, // video_format_description 154 format, // video_format_description
161 decoder_config, // video_decoder_specification 155 decoder_config, // video_decoder_specification
162 image_config, // destination_image_buffer_attributes 156 image_config, // destination_image_buffer_attributes
163 &callback, // output_callback 157 &callback, // output_callback
164 session.InitializeInto()); 158 session.InitializeInto());
165 if (status) { 159 if (status) {
166 OSSTATUS_DLOG(WARNING, status) 160 OSSTATUS_DLOG(WARNING, status)
167 << "Failed to create VTDecompressionSession"; 161 << "Failed to create VTDecompressionSession";
168 return false; 162 return false;
169 } 163 }
170 164
171 return true; 165 return true;
172 } 166 }
173 167
(...skipping 53 matching lines...)
227 221
228 if (!attempted) { 222 if (!attempted) {
229 attempted = true; 223 attempted = true;
230 succeeded = InitializeVideoToolboxInternal(); 224 succeeded = InitializeVideoToolboxInternal();
231 } 225 }
232 226
233 return succeeded; 227 return succeeded;
234 } 228 }
235 229
236 // Route decoded frame callbacks back into the VTVideoDecodeAccelerator. 230 // Route decoded frame callbacks back into the VTVideoDecodeAccelerator.
-static void OutputThunk(
-    void* decompression_output_refcon,
-    void* source_frame_refcon,
-    OSStatus status,
-    VTDecodeInfoFlags info_flags,
-    CVImageBufferRef image_buffer,
-    CMTime presentation_time_stamp,
-    CMTime presentation_duration) {
+static void OutputThunk(void* decompression_output_refcon,
+                        void* source_frame_refcon,
+                        OSStatus status,
+                        VTDecodeInfoFlags info_flags,
+                        CVImageBufferRef image_buffer,
+                        CMTime presentation_time_stamp,
+                        CMTime presentation_duration) {
245 VTVideoDecodeAccelerator* vda = 238 VTVideoDecodeAccelerator* vda =
246 reinterpret_cast<VTVideoDecodeAccelerator*>(decompression_output_refcon); 239 reinterpret_cast<VTVideoDecodeAccelerator*>(decompression_output_refcon);
247 vda->Output(source_frame_refcon, status, image_buffer); 240 vda->Output(source_frame_refcon, status, image_buffer);
248 } 241 }
249 242
-VTVideoDecodeAccelerator::Task::Task(TaskType type) : type(type) {
-}
+VTVideoDecodeAccelerator::Task::Task(TaskType type) : type(type) {}
252 244
253 VTVideoDecodeAccelerator::Task::Task(const Task& other) = default; 245 VTVideoDecodeAccelerator::Task::Task(const Task& other) = default;
254 246
-VTVideoDecodeAccelerator::Task::~Task() {
-}
+VTVideoDecodeAccelerator::Task::~Task() {}
257 248
258 VTVideoDecodeAccelerator::Frame::Frame(int32_t bitstream_id) 249 VTVideoDecodeAccelerator::Frame::Frame(int32_t bitstream_id)
259 : bitstream_id(bitstream_id), 250 : bitstream_id(bitstream_id),
260 pic_order_cnt(0), 251 pic_order_cnt(0),
261 is_idr(false), 252 is_idr(false),
-      reorder_window(0) {
-}
+      reorder_window(0) {}
264 254
-VTVideoDecodeAccelerator::Frame::~Frame() {
-}
+VTVideoDecodeAccelerator::Frame::~Frame() {}
267 256
268 VTVideoDecodeAccelerator::PictureInfo::PictureInfo(uint32_t client_texture_id, 257 VTVideoDecodeAccelerator::PictureInfo::PictureInfo(uint32_t client_texture_id,
269 uint32_t service_texture_id) 258 uint32_t service_texture_id)
270 : client_texture_id(client_texture_id), 259 : client_texture_id(client_texture_id),
271 service_texture_id(service_texture_id) {} 260 service_texture_id(service_texture_id) {}
272 261
273 VTVideoDecodeAccelerator::PictureInfo::~PictureInfo() { 262 VTVideoDecodeAccelerator::PictureInfo::~PictureInfo() {
274 if (gl_image) 263 if (gl_image)
275 gl_image->Destroy(false); 264 gl_image->Destroy(false);
276 } 265 }
(...skipping 64 matching lines...)
341 } 330 }
342 if (!profile_supported) 331 if (!profile_supported)
343 return false; 332 return false;
344 333
345 // Spawn a thread to handle parsing and calling VideoToolbox. 334 // Spawn a thread to handle parsing and calling VideoToolbox.
346 if (!decoder_thread_.Start()) 335 if (!decoder_thread_.Start())
347 return false; 336 return false;
348 337
349 // Count the session as successfully initialized. 338 // Count the session as successfully initialized.
350 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason", 339 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason",
-                            SFT_SUCCESSFULLY_INITIALIZED,
-                            SFT_MAX + 1);
+                            SFT_SUCCESSFULLY_INITIALIZED, SFT_MAX + 1);
353 return true; 341 return true;
354 } 342 }
355 343
356 bool VTVideoDecodeAccelerator::FinishDelayedFrames() { 344 bool VTVideoDecodeAccelerator::FinishDelayedFrames() {
357 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 345 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
358 if (session_) { 346 if (session_) {
359 OSStatus status = VTDecompressionSessionWaitForAsynchronousFrames(session_); 347 OSStatus status = VTDecompressionSessionWaitForAsynchronousFrames(session_);
360 if (status) { 348 if (status) {
361 NOTIFY_STATUS("VTDecompressionSessionWaitForAsynchronousFrames()", 349 NOTIFY_STATUS("VTDecompressionSessionWaitForAsynchronousFrames()", status,
362 status, SFT_PLATFORM_ERROR); 350 SFT_PLATFORM_ERROR);
363 return false; 351 return false;
364 } 352 }
365 } 353 }
366 return true; 354 return true;
367 } 355 }
368 356
369 bool VTVideoDecodeAccelerator::ConfigureDecoder() { 357 bool VTVideoDecodeAccelerator::ConfigureDecoder() {
370 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 358 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
371 DCHECK(!last_sps_.empty()); 359 DCHECK(!last_sps_.empty());
372 DCHECK(!last_pps_.empty()); 360 DCHECK(!last_pps_.empty());
373 361
374 // Build the configuration records. 362 // Build the configuration records.
375 std::vector<const uint8_t*> nalu_data_ptrs; 363 std::vector<const uint8_t*> nalu_data_ptrs;
376 std::vector<size_t> nalu_data_sizes; 364 std::vector<size_t> nalu_data_sizes;
377 nalu_data_ptrs.reserve(3); 365 nalu_data_ptrs.reserve(3);
378 nalu_data_sizes.reserve(3); 366 nalu_data_sizes.reserve(3);
379 nalu_data_ptrs.push_back(&last_sps_.front()); 367 nalu_data_ptrs.push_back(&last_sps_.front());
380 nalu_data_sizes.push_back(last_sps_.size()); 368 nalu_data_sizes.push_back(last_sps_.size());
381 if (!last_spsext_.empty()) { 369 if (!last_spsext_.empty()) {
382 nalu_data_ptrs.push_back(&last_spsext_.front()); 370 nalu_data_ptrs.push_back(&last_spsext_.front());
383 nalu_data_sizes.push_back(last_spsext_.size()); 371 nalu_data_sizes.push_back(last_spsext_.size());
384 } 372 }
385 nalu_data_ptrs.push_back(&last_pps_.front()); 373 nalu_data_ptrs.push_back(&last_pps_.front());
386 nalu_data_sizes.push_back(last_pps_.size()); 374 nalu_data_sizes.push_back(last_pps_.size());
387 375
388 // Construct a new format description from the parameter sets. 376 // Construct a new format description from the parameter sets.
389 format_.reset(); 377 format_.reset();
390 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets( 378 OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
391 kCFAllocatorDefault, 379 kCFAllocatorDefault,
392 nalu_data_ptrs.size(), // parameter_set_count 380 nalu_data_ptrs.size(), // parameter_set_count
393 &nalu_data_ptrs.front(), // &parameter_set_pointers 381 &nalu_data_ptrs.front(), // &parameter_set_pointers
394 &nalu_data_sizes.front(), // &parameter_set_sizes 382 &nalu_data_sizes.front(), // &parameter_set_sizes
395 kNALUHeaderLength, // nal_unit_header_length 383 kNALUHeaderLength, // nal_unit_header_length
396 format_.InitializeInto()); 384 format_.InitializeInto());
397 if (status) { 385 if (status) {
398 NOTIFY_STATUS("CMVideoFormatDescriptionCreateFromH264ParameterSets()", 386 NOTIFY_STATUS("CMVideoFormatDescriptionCreateFromH264ParameterSets()",
399 status, SFT_PLATFORM_ERROR); 387 status, SFT_PLATFORM_ERROR);
400 return false; 388 return false;
401 } 389 }
402 390
403 // Store the new configuration data. 391 // Store the new configuration data.
404 // TODO(sandersd): Despite the documentation, this seems to return the visible 392 // TODO(sandersd): Despite the documentation, this seems to return the visible
405 // size. However, the output always appears to be top-left aligned, so it 393 // size. However, the output always appears to be top-left aligned, so it
406 // makes no difference. Re-verify this and update the variable name. 394 // makes no difference. Re-verify this and update the variable name.
407 CMVideoDimensions coded_dimensions = 395 CMVideoDimensions coded_dimensions =
408 CMVideoFormatDescriptionGetDimensions(format_); 396 CMVideoFormatDescriptionGetDimensions(format_);
409 coded_size_.SetSize(coded_dimensions.width, coded_dimensions.height); 397 coded_size_.SetSize(coded_dimensions.width, coded_dimensions.height);
410 398
411 // Prepare VideoToolbox configuration dictionaries. 399 // Prepare VideoToolbox configuration dictionaries.
412 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config( 400 base::ScopedCFTypeRef<CFMutableDictionaryRef> decoder_config(
-      CFDictionaryCreateMutable(
-          kCFAllocatorDefault,
-          1,  // capacity
-          &kCFTypeDictionaryKeyCallBacks,
-          &kCFTypeDictionaryValueCallBacks));
+      CFDictionaryCreateMutable(kCFAllocatorDefault,
+                                1,  // capacity
+                                &kCFTypeDictionaryKeyCallBacks,
+                                &kCFTypeDictionaryValueCallBacks));
418 if (!decoder_config.get()) { 405 if (!decoder_config.get()) {
419 DLOG(ERROR) << "Failed to create CFMutableDictionary"; 406 DLOG(ERROR) << "Failed to create CFMutableDictionary";
420 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 407 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
421 return false; 408 return false;
422 } 409 }
423 410
424 CFDictionarySetValue( 411 CFDictionarySetValue(
425 decoder_config, 412 decoder_config,
426 // kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder 413 // kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder
427 CFSTR("EnableHardwareAcceleratedVideoDecoder"), 414 CFSTR("EnableHardwareAcceleratedVideoDecoder"), kCFBooleanTrue);
428 kCFBooleanTrue);
429 415
430 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config( 416 base::ScopedCFTypeRef<CFMutableDictionaryRef> image_config(
431 BuildImageConfig(coded_dimensions)); 417 BuildImageConfig(coded_dimensions));
432 if (!image_config.get()) { 418 if (!image_config.get()) {
433 DLOG(ERROR) << "Failed to create decoder image configuration"; 419 DLOG(ERROR) << "Failed to create decoder image configuration";
434 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 420 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
435 return false; 421 return false;
436 } 422 }
437 423
438 // Ensure that the old decoder emits all frames before the new decoder can 424 // Ensure that the old decoder emits all frames before the new decoder can
439 // emit any. 425 // emit any.
440 if (!FinishDelayedFrames()) 426 if (!FinishDelayedFrames())
441 return false; 427 return false;
442 428
443 session_.reset(); 429 session_.reset();
444 status = VTDecompressionSessionCreate( 430 status = VTDecompressionSessionCreate(
445 kCFAllocatorDefault, 431 kCFAllocatorDefault,
446 format_, // video_format_description 432 format_, // video_format_description
447 decoder_config, // video_decoder_specification 433 decoder_config, // video_decoder_specification
448 image_config, // destination_image_buffer_attributes 434 image_config, // destination_image_buffer_attributes
449 &callback_, // output_callback 435 &callback_, // output_callback
450 session_.InitializeInto()); 436 session_.InitializeInto());
451 if (status) { 437 if (status) {
452 NOTIFY_STATUS("VTDecompressionSessionCreate()", status, 438 NOTIFY_STATUS("VTDecompressionSessionCreate()", status,
453 SFT_UNSUPPORTED_STREAM_PARAMETERS); 439 SFT_UNSUPPORTED_STREAM_PARAMETERS);
454 return false; 440 return false;
455 } 441 }
456 442
457 // Report whether hardware decode is being used. 443 // Report whether hardware decode is being used.
458 bool using_hardware = false; 444 bool using_hardware = false;
459 base::ScopedCFTypeRef<CFBooleanRef> cf_using_hardware; 445 base::ScopedCFTypeRef<CFBooleanRef> cf_using_hardware;
460 if (VTSessionCopyProperty( 446 if (VTSessionCopyProperty(
461 session_, 447 session_,
462 // kVTDecompressionPropertyKey_UsingHardwareAcceleratedVideoDecoder 448 // kVTDecompressionPropertyKey_UsingHardwareAcceleratedVideoDecoder
463 CFSTR("UsingHardwareAcceleratedVideoDecoder"), 449 CFSTR("UsingHardwareAcceleratedVideoDecoder"), kCFAllocatorDefault,
464 kCFAllocatorDefault,
465 cf_using_hardware.InitializeInto()) == 0) { 450 cf_using_hardware.InitializeInto()) == 0) {
466 using_hardware = CFBooleanGetValue(cf_using_hardware); 451 using_hardware = CFBooleanGetValue(cf_using_hardware);
467 } 452 }
468 UMA_HISTOGRAM_BOOLEAN("Media.VTVDA.HardwareAccelerated", using_hardware); 453 UMA_HISTOGRAM_BOOLEAN("Media.VTVDA.HardwareAccelerated", using_hardware);
469 454
470 return true; 455 return true;
471 } 456 }
472 457
473 void VTVideoDecodeAccelerator::DecodeTask( 458 void VTVideoDecodeAccelerator::DecodeTask(
474 const media::BitstreamBuffer& bitstream, 459 const media::BitstreamBuffer& bitstream,
(...skipping 129 matching lines...)
604 DLOG(ERROR) << "Unable to compute POC"; 589 DLOG(ERROR) << "Unable to compute POC";
605 NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM); 590 NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM);
606 return; 591 return;
607 } 592 }
608 593
609 if (nalu.nal_unit_type == media::H264NALU::kIDRSlice) 594 if (nalu.nal_unit_type == media::H264NALU::kIDRSlice)
610 frame->is_idr = true; 595 frame->is_idr = true;
611 596
612 if (sps->vui_parameters_present_flag && 597 if (sps->vui_parameters_present_flag &&
613 sps->bitstream_restriction_flag) { 598 sps->bitstream_restriction_flag) {
-          frame->reorder_window = std::min(sps->max_num_reorder_frames,
-                                           kMaxReorderQueueSize - 1);
+          frame->reorder_window =
+              std::min(sps->max_num_reorder_frames, kMaxReorderQueueSize - 1);
616 } 601 }
617 } 602 }
618 has_slice = true; 603 has_slice = true;
619 default: 604 default:
620 nalus.push_back(nalu); 605 nalus.push_back(nalu);
621 data_size += kNALUHeaderLength + nalu.size; 606 data_size += kNALUHeaderLength + nalu.size;
622 break; 607 break;
623 } 608 }
624 } 609 }
625 610
(...skipping 38 matching lines...)
664 } 649 }
665 has_slice = false; 650 has_slice = false;
666 } 651 }
667 652
668 // If there is nothing to decode, drop the bitstream buffer by returning an 653 // If there is nothing to decode, drop the bitstream buffer by returning an
669 // empty frame. 654 // empty frame.
670 if (!has_slice) { 655 if (!has_slice) {
671 // Keep everything in order by flushing first. 656 // Keep everything in order by flushing first.
672 if (!FinishDelayedFrames()) 657 if (!FinishDelayedFrames())
673 return; 658 return;
-    gpu_task_runner_->PostTask(FROM_HERE, base::Bind(
-        &VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame));
+    gpu_task_runner_->PostTask(
+        FROM_HERE,
+        base::Bind(&VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame));
676 return; 662 return;
677 } 663 }
678 664
679 // If the session is not configured by this point, fail. 665 // If the session is not configured by this point, fail.
680 if (!session_) { 666 if (!session_) {
681 DLOG(ERROR) << "Cannot decode without configuration"; 667 DLOG(ERROR) << "Cannot decode without configuration";
682 NotifyError(INVALID_ARGUMENT, SFT_INVALID_STREAM); 668 NotifyError(INVALID_ARGUMENT, SFT_INVALID_STREAM);
683 return; 669 return;
684 } 670 }
685 671
(...skipping 27 matching lines...)
713 NOTIFY_STATUS("CMBlockBufferAssureBlockMemory()", status, 699 NOTIFY_STATUS("CMBlockBufferAssureBlockMemory()", status,
714 SFT_PLATFORM_ERROR); 700 SFT_PLATFORM_ERROR);
715 return; 701 return;
716 } 702 }
717 703
718 // Copy NALU data into the CMBlockBuffer, inserting length headers. 704 // Copy NALU data into the CMBlockBuffer, inserting length headers.
719 size_t offset = 0; 705 size_t offset = 0;
720 for (size_t i = 0; i < nalus.size(); i++) { 706 for (size_t i = 0; i < nalus.size(); i++) {
721 media::H264NALU& nalu = nalus[i]; 707 media::H264NALU& nalu = nalus[i];
722 uint32_t header = base::HostToNet32(static_cast<uint32_t>(nalu.size)); 708 uint32_t header = base::HostToNet32(static_cast<uint32_t>(nalu.size));
-    status = CMBlockBufferReplaceDataBytes(
-        &header, data, offset, kNALUHeaderLength);
+    status =
+        CMBlockBufferReplaceDataBytes(&header, data, offset, kNALUHeaderLength);
725 if (status) { 711 if (status) {
726 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status, 712 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status,
727 SFT_PLATFORM_ERROR); 713 SFT_PLATFORM_ERROR);
728 return; 714 return;
729 } 715 }
730 offset += kNALUHeaderLength; 716 offset += kNALUHeaderLength;
731 status = CMBlockBufferReplaceDataBytes(nalu.data, data, offset, nalu.size); 717 status = CMBlockBufferReplaceDataBytes(nalu.data, data, offset, nalu.size);
732 if (status) { 718 if (status) {
733 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status, 719 NOTIFY_STATUS("CMBlockBufferReplaceDataBytes()", status,
734 SFT_PLATFORM_ERROR); 720 SFT_PLATFORM_ERROR);
735 return; 721 return;
736 } 722 }
737 offset += nalu.size; 723 offset += nalu.size;
738 } 724 }
739 725
740 // Package the data in a CMSampleBuffer. 726 // Package the data in a CMSampleBuffer.
741 base::ScopedCFTypeRef<CMSampleBufferRef> sample; 727 base::ScopedCFTypeRef<CMSampleBufferRef> sample;
-  status = CMSampleBufferCreate(
-      kCFAllocatorDefault,
-      data,        // data_buffer
-      true,        // data_ready
-      nullptr,     // make_data_ready_callback
-      nullptr,     // make_data_ready_refcon
-      format_,     // format_description
-      1,           // num_samples
-      0,           // num_sample_timing_entries
-      nullptr,     // &sample_timing_array
-      1,           // num_sample_size_entries
-      &data_size,  // &sample_size_array
-      sample.InitializeInto());
+  status = CMSampleBufferCreate(kCFAllocatorDefault,
+                                data,        // data_buffer
+                                true,        // data_ready
+                                nullptr,     // make_data_ready_callback
+                                nullptr,     // make_data_ready_refcon
+                                format_,     // format_description
+                                1,           // num_samples
+                                0,           // num_sample_timing_entries
+                                nullptr,     // &sample_timing_array
+                                1,           // num_sample_size_entries
+                                &data_size,  // &sample_size_array
+                                sample.InitializeInto());
755 if (status) { 740 if (status) {
756 NOTIFY_STATUS("CMSampleBufferCreate()", status, SFT_PLATFORM_ERROR); 741 NOTIFY_STATUS("CMSampleBufferCreate()", status, SFT_PLATFORM_ERROR);
757 return; 742 return;
758 } 743 }
759 744
760 // Send the frame for decoding. 745 // Send the frame for decoding.
761 // Asynchronous Decompression allows for parallel submission of frames 746 // Asynchronous Decompression allows for parallel submission of frames
762 // (without it, DecodeFrame() does not return until the frame has been 747 // (without it, DecodeFrame() does not return until the frame has been
763 // decoded). We don't enable Temporal Processing so that frames are always 748 // decoded). We don't enable Temporal Processing so that frames are always
764 // returned in decode order; this makes it easier to avoid deadlock. 749 // returned in decode order; this makes it easier to avoid deadlock.
765 VTDecodeFrameFlags decode_flags = 750 VTDecodeFrameFlags decode_flags =
766 kVTDecodeFrame_EnableAsynchronousDecompression; 751 kVTDecodeFrame_EnableAsynchronousDecompression;
767 status = VTDecompressionSessionDecodeFrame( 752 status = VTDecompressionSessionDecodeFrame(
768 session_, 753 session_,
769 sample, // sample_buffer 754 sample, // sample_buffer
770 decode_flags, // decode_flags 755 decode_flags, // decode_flags
771 reinterpret_cast<void*>(frame), // source_frame_refcon 756 reinterpret_cast<void*>(frame), // source_frame_refcon
772 nullptr); // &info_flags_out 757 nullptr); // &info_flags_out
773 if (status) { 758 if (status) {
774 NOTIFY_STATUS("VTDecompressionSessionDecodeFrame()", status, 759 NOTIFY_STATUS("VTDecompressionSessionDecodeFrame()", status,
775 SFT_DECODE_ERROR); 760 SFT_DECODE_ERROR);
776 return; 761 return;
777 } 762 }
778 } 763 }
779 764
780 // This method may be called on any VideoToolbox thread. 765 // This method may be called on any VideoToolbox thread.
-void VTVideoDecodeAccelerator::Output(
-    void* source_frame_refcon,
-    OSStatus status,
-    CVImageBufferRef image_buffer) {
+void VTVideoDecodeAccelerator::Output(void* source_frame_refcon,
+                                      OSStatus status,
+                                      CVImageBufferRef image_buffer) {
785 if (status) { 769 if (status) {
786 NOTIFY_STATUS("Decoding", status, SFT_DECODE_ERROR); 770 NOTIFY_STATUS("Decoding", status, SFT_DECODE_ERROR);
787 return; 771 return;
788 } 772 }
789 773
790 // The type of |image_buffer| is CVImageBuffer, but we only handle 774 // The type of |image_buffer| is CVImageBuffer, but we only handle
791 // CVPixelBuffers. This should be guaranteed as we set 775 // CVPixelBuffers. This should be guaranteed as we set
792 // kCVPixelBufferOpenGLCompatibilityKey in |image_config|. 776 // kCVPixelBufferOpenGLCompatibilityKey in |image_config|.
793 // 777 //
794 // Sometimes, for unknown reasons (http://crbug.com/453050), |image_buffer| is 778 // Sometimes, for unknown reasons (http://crbug.com/453050), |image_buffer| is
795 // NULL, which causes CFGetTypeID() to crash. While the rest of the code would 779 // NULL, which causes CFGetTypeID() to crash. While the rest of the code would
796 // smoothly handle NULL as a dropped frame, we choose to fail permanantly here 780 // smoothly handle NULL as a dropped frame, we choose to fail permanantly here
797 // until the issue is better understood. 781 // until the issue is better understood.
798 if (!image_buffer || CFGetTypeID(image_buffer) != CVPixelBufferGetTypeID()) { 782 if (!image_buffer || CFGetTypeID(image_buffer) != CVPixelBufferGetTypeID()) {
799 DLOG(ERROR) << "Decoded frame is not a CVPixelBuffer"; 783 DLOG(ERROR) << "Decoded frame is not a CVPixelBuffer";
800 NotifyError(PLATFORM_FAILURE, SFT_DECODE_ERROR); 784 NotifyError(PLATFORM_FAILURE, SFT_DECODE_ERROR);
801 return; 785 return;
802 } 786 }
803 787
804 Frame* frame = reinterpret_cast<Frame*>(source_frame_refcon); 788 Frame* frame = reinterpret_cast<Frame*>(source_frame_refcon);
805 frame->image.reset(image_buffer, base::scoped_policy::RETAIN); 789 frame->image.reset(image_buffer, base::scoped_policy::RETAIN);
-  gpu_task_runner_->PostTask(FROM_HERE, base::Bind(
-      &VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame));
+  gpu_task_runner_->PostTask(
+      FROM_HERE,
+      base::Bind(&VTVideoDecodeAccelerator::DecodeDone, weak_this_, frame));
808 } 793 }
809 794
810 void VTVideoDecodeAccelerator::DecodeDone(Frame* frame) { 795 void VTVideoDecodeAccelerator::DecodeDone(Frame* frame) {
811 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 796 DCHECK(gpu_thread_checker_.CalledOnValidThread());
812 DCHECK_EQ(1u, pending_frames_.count(frame->bitstream_id)); 797 DCHECK_EQ(1u, pending_frames_.count(frame->bitstream_id));
813 Task task(TASK_FRAME); 798 Task task(TASK_FRAME);
814 task.frame = pending_frames_[frame->bitstream_id]; 799 task.frame = pending_frames_[frame->bitstream_id];
815 pending_frames_.erase(frame->bitstream_id); 800 pending_frames_.erase(frame->bitstream_id);
816 task_queue_.push(task); 801 task_queue_.push(task);
817 ProcessWorkQueues(); 802 ProcessWorkQueues();
818 } 803 }
819 804
820 void VTVideoDecodeAccelerator::FlushTask(TaskType type) { 805 void VTVideoDecodeAccelerator::FlushTask(TaskType type) {
821 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); 806 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
822 FinishDelayedFrames(); 807 FinishDelayedFrames();
823 808
824 // Always queue a task, even if FinishDelayedFrames() fails, so that 809 // Always queue a task, even if FinishDelayedFrames() fails, so that
825 // destruction always completes. 810 // destruction always completes.
-  gpu_task_runner_->PostTask(FROM_HERE, base::Bind(
-      &VTVideoDecodeAccelerator::FlushDone, weak_this_, type));
+  gpu_task_runner_->PostTask(
+      FROM_HERE,
+      base::Bind(&VTVideoDecodeAccelerator::FlushDone, weak_this_, type));
828 } 814 }
829 815
830 void VTVideoDecodeAccelerator::FlushDone(TaskType type) { 816 void VTVideoDecodeAccelerator::FlushDone(TaskType type) {
831 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 817 DCHECK(gpu_thread_checker_.CalledOnValidThread());
832 task_queue_.push(Task(type)); 818 task_queue_.push(Task(type));
833 ProcessWorkQueues(); 819 ProcessWorkQueues();
834 } 820 }
835 821
836 void VTVideoDecodeAccelerator::Decode(const media::BitstreamBuffer& bitstream) { 822 void VTVideoDecodeAccelerator::Decode(const media::BitstreamBuffer& bitstream) {
837 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 823 DCHECK(gpu_thread_checker_.CalledOnValidThread());
(...skipping 25 matching lines...)
863 DCHECK_LE(1u, picture.texture_ids().size()); 849 DCHECK_LE(1u, picture.texture_ids().size());
864 picture_info_map_.insert(std::make_pair( 850 picture_info_map_.insert(std::make_pair(
865 picture.id(), 851 picture.id(),
866 base::WrapUnique(new PictureInfo(picture.internal_texture_ids()[0], 852 base::WrapUnique(new PictureInfo(picture.internal_texture_ids()[0],
867 picture.texture_ids()[0])))); 853 picture.texture_ids()[0]))));
868 } 854 }
869 855
870 // Pictures are not marked as uncleared until after this method returns, and 856 // Pictures are not marked as uncleared until after this method returns, and
871 // they will be broken if they are used before that happens. So, schedule 857 // they will be broken if they are used before that happens. So, schedule
872 // future work after that happens. 858 // future work after that happens.
-  gpu_task_runner_->PostTask(FROM_HERE, base::Bind(
-      &VTVideoDecodeAccelerator::ProcessWorkQueues, weak_this_));
+  gpu_task_runner_->PostTask(
+      FROM_HERE,
+      base::Bind(&VTVideoDecodeAccelerator::ProcessWorkQueues, weak_this_));
875 } 862 }
876 863
877 void VTVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_id) { 864 void VTVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_id) {
878 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 865 DCHECK(gpu_thread_checker_.CalledOnValidThread());
879 DCHECK(picture_info_map_.count(picture_id)); 866 DCHECK(picture_info_map_.count(picture_id));
880 PictureInfo* picture_info = picture_info_map_.find(picture_id)->second.get(); 867 PictureInfo* picture_info = picture_info_map_.find(picture_id)->second.get();
881 picture_info->cv_image.reset(); 868 picture_info->cv_image.reset();
882 picture_info->gl_image->Destroy(false); 869 picture_info->gl_image->Destroy(false);
883 picture_info->gl_image = nullptr; 870 picture_info->gl_image = nullptr;
884 871
(...skipping 84 matching lines...)
969 bool VTVideoDecodeAccelerator::ProcessReorderQueue() { 956 bool VTVideoDecodeAccelerator::ProcessReorderQueue() {
970 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 957 DCHECK(gpu_thread_checker_.CalledOnValidThread());
971 DCHECK_EQ(state_, STATE_DECODING); 958 DCHECK_EQ(state_, STATE_DECODING);
972 959
973 if (reorder_queue_.empty()) 960 if (reorder_queue_.empty())
974 return false; 961 return false;
975 962
976 // If the next task is a flush (because there is a pending flush or becuase 963 // If the next task is a flush (because there is a pending flush or becuase
977 // the next frame is an IDR), then we don't need a full reorder buffer to send 964 // the next frame is an IDR), then we don't need a full reorder buffer to send
978 // the next frame. 965 // the next frame.
-  bool flushing = !task_queue_.empty() &&
-                  (task_queue_.front().type != TASK_FRAME ||
-                   task_queue_.front().frame->is_idr);
+  bool flushing =
+      !task_queue_.empty() && (task_queue_.front().type != TASK_FRAME ||
+                               task_queue_.front().frame->is_idr);
982 969
983 size_t reorder_window = std::max(0, reorder_queue_.top()->reorder_window); 970 size_t reorder_window = std::max(0, reorder_queue_.top()->reorder_window);
984 if (flushing || reorder_queue_.size() > reorder_window) { 971 if (flushing || reorder_queue_.size() > reorder_window) {
985 if (ProcessFrame(*reorder_queue_.top())) { 972 if (ProcessFrame(*reorder_queue_.top())) {
986 reorder_queue_.pop(); 973 reorder_queue_.pop();
987 return true; 974 return true;
988 } 975 }
989 } 976 }
990 977
991 return false; 978 return false;
(...skipping 52 matching lines...)
1044 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 1031 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
1045 return false; 1032 return false;
1046 } 1033 }
1047 1034
1048 scoped_refptr<gl::GLImageIOSurface> gl_image( 1035 scoped_refptr<gl::GLImageIOSurface> gl_image(
1049 new gl::GLImageIOSurface(frame.coded_size, GL_BGRA_EXT)); 1036 new gl::GLImageIOSurface(frame.coded_size, GL_BGRA_EXT));
1050 if (!gl_image->InitializeWithCVPixelBuffer( 1037 if (!gl_image->InitializeWithCVPixelBuffer(
1051 frame.image.get(), gfx::GenericSharedMemoryId(), 1038 frame.image.get(), gfx::GenericSharedMemoryId(),
1052 gfx::BufferFormat::YUV_420_BIPLANAR)) { 1039 gfx::BufferFormat::YUV_420_BIPLANAR)) {
1053 NOTIFY_STATUS("Failed to initialize GLImageIOSurface", PLATFORM_FAILURE, 1040 NOTIFY_STATUS("Failed to initialize GLImageIOSurface", PLATFORM_FAILURE,
1054 SFT_PLATFORM_ERROR); 1041 SFT_PLATFORM_ERROR);
1055 } 1042 }
1056 1043
1057 if (!bind_image_cb_.Run(picture_info->client_texture_id, 1044 if (!bind_image_cb_.Run(picture_info->client_texture_id,
1058 GL_TEXTURE_RECTANGLE_ARB, gl_image, false)) { 1045 GL_TEXTURE_RECTANGLE_ARB, gl_image, false)) {
1059 DLOG(ERROR) << "Failed to bind image"; 1046 DLOG(ERROR) << "Failed to bind image";
1060 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR); 1047 NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
1061 return false; 1048 return false;
1062 } 1049 }
1063 1050
1064 // Assign the new image(s) to the the picture info. 1051 // Assign the new image(s) to the the picture info.
1065 picture_info->gl_image = gl_image; 1052 picture_info->gl_image = gl_image;
1066 picture_info->cv_image = frame.image; 1053 picture_info->cv_image = frame.image;
1067 available_picture_ids_.pop_back(); 1054 available_picture_ids_.pop_back();
1068 1055
1069 // TODO(sandersd): Currently, the size got from 1056 // TODO(sandersd): Currently, the size got from
1070 // CMVideoFormatDescriptionGetDimensions is visible size. We pass it to 1057 // CMVideoFormatDescriptionGetDimensions is visible size. We pass it to
1071 // GpuVideoDecoder so that GpuVideoDecoder can use correct visible size in 1058 // GpuVideoDecoder so that GpuVideoDecoder can use correct visible size in
1072 // resolution changed. We should find the correct API to get the real 1059 // resolution changed. We should find the correct API to get the real
1073 // coded size and fix it. 1060 // coded size and fix it.
1074 client_->PictureReady(media::Picture(picture_id, frame.bitstream_id, 1061 client_->PictureReady(media::Picture(picture_id, frame.bitstream_id,
-                                       gfx::Rect(frame.coded_size),
-                                       true));
+                                       gfx::Rect(frame.coded_size), true));
1077 return true; 1063 return true;
1078 } 1064 }
1079 1065
1080 void VTVideoDecodeAccelerator::NotifyError( 1066 void VTVideoDecodeAccelerator::NotifyError(
1081 Error vda_error_type, 1067 Error vda_error_type,
1082 VTVDASessionFailureType session_failure_type) { 1068 VTVDASessionFailureType session_failure_type) {
1083 DCHECK_LT(session_failure_type, SFT_MAX + 1); 1069 DCHECK_LT(session_failure_type, SFT_MAX + 1);
1084 if (!gpu_thread_checker_.CalledOnValidThread()) { 1070 if (!gpu_thread_checker_.CalledOnValidThread()) {
-    gpu_task_runner_->PostTask(FROM_HERE, base::Bind(
-        &VTVideoDecodeAccelerator::NotifyError, weak_this_, vda_error_type,
-        session_failure_type));
+    gpu_task_runner_->PostTask(
+        FROM_HERE,
+        base::Bind(&VTVideoDecodeAccelerator::NotifyError, weak_this_,
+                   vda_error_type, session_failure_type));
1088 } else if (state_ == STATE_DECODING) { 1075 } else if (state_ == STATE_DECODING) {
1089 state_ = STATE_ERROR; 1076 state_ = STATE_ERROR;
1090 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason", 1077 UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason",
-                              session_failure_type,
-                              SFT_MAX + 1);
+                              session_failure_type, SFT_MAX + 1);
1093 client_->NotifyError(vda_error_type); 1079 client_->NotifyError(vda_error_type);
1094 } 1080 }
1095 } 1081 }
1096 1082
1097 void VTVideoDecodeAccelerator::QueueFlush(TaskType type) { 1083 void VTVideoDecodeAccelerator::QueueFlush(TaskType type) {
1098 DCHECK(gpu_thread_checker_.CalledOnValidThread()); 1084 DCHECK(gpu_thread_checker_.CalledOnValidThread());
1099 pending_flush_tasks_.push(type); 1085 pending_flush_tasks_.push(type);
1100 decoder_thread_.task_runner()->PostTask( 1086 decoder_thread_.task_runner()->PostTask(
1101 FROM_HERE, base::Bind(&VTVideoDecodeAccelerator::FlushTask, 1087 FROM_HERE, base::Bind(&VTVideoDecodeAccelerator::FlushTask,
1102 base::Unretained(this), type)); 1088 base::Unretained(this), type));
(...skipping 46 matching lines...)
1149 for (const auto& supported_profile : kSupportedProfiles) { 1135 for (const auto& supported_profile : kSupportedProfiles) {
1150 SupportedProfile profile; 1136 SupportedProfile profile;
1151 profile.profile = supported_profile; 1137 profile.profile = supported_profile;
1152 profile.min_resolution.SetSize(16, 16); 1138 profile.min_resolution.SetSize(16, 16);
1153 profile.max_resolution.SetSize(4096, 2160); 1139 profile.max_resolution.SetSize(4096, 2160);
1154 profiles.push_back(profile); 1140 profiles.push_back(profile);
1155 } 1141 }
1156 return profiles; 1142 return profiles;
1157 } 1143 }
1158 1144
-}  // namespace content
+}  // namespace media