Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(607)

Side by Side Diff: media/gpu/d3d11_video_decoder.cc

Issue 2534313004: Add prototype D3D11VideoDecodeAccelerator. (Closed)
Patch Set: clean up code Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 // Copyright 2016 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "media/gpu/d3d11_video_decoder.h"
6
7 #define INITGUID
8 #include <d3d11.h>
9 #include <dxva.h>
10 #include <windows.h>
11
12 #include "base/memory/ptr_util.h"
13 #include "base/trace_event/trace_event.h"
14 #include "base/win/scoped_comptr.h"
15 #include "media/gpu/h264_decoder.h"
16 #include "media/gpu/h264_dpb.h"
17 #include "third_party/angle/include/EGL/egl.h"
18 #include "third_party/angle/include/EGL/eglext.h"
19 #include "ui/gfx/color_space.h"
20 #include "ui/gl/gl_bindings.h"
21 #include "ui/gl/gl_context.h"
22 #include "ui/gl/gl_surface_egl.h"
23 #include "ui/gl/scoped_binders.h"
24
25 namespace media {
26
27 #define RETURN_ON_FAILURE(result, log, ret) \
28 do { \
29 if (!(result)) { \
30 DLOG(ERROR) << log; \
31 return ret; \
32 } \
33 } while (0)
34
35 D3D11PictureBuffer::D3D11PictureBuffer(PictureBuffer picture_buffer,
36 size_t level)
37 : picture_buffer_(picture_buffer), level_(level) {}
38
39 D3D11PictureBuffer::~D3D11PictureBuffer() {}
40
41 bool D3D11PictureBuffer::Init(
42 base::win::ScopedComPtr<ID3D11VideoDevice> video_device,
43 base::win::ScopedComPtr<ID3D11Texture2D> texture,
44 const GUID& decoder_guid) {
45 texture_ = texture;
46 D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC view_desc = {};
47 view_desc.DecodeProfile = decoder_guid;
48 view_desc.ViewDimension = D3D11_VDOV_DIMENSION_TEXTURE2D;
49 view_desc.Texture2D.ArraySlice = (UINT)level_;
50
51 HRESULT hr = video_device->CreateVideoDecoderOutputView(
52 texture.get(), &view_desc, output_view_.Receive());
53
54 CHECK(SUCCEEDED(hr));
55 EGLDisplay egl_display = gl::GLSurfaceEGL::GetHardwareDisplay();
56 const EGLint stream_attributes[] = {
57 EGL_CONSUMER_LATENCY_USEC_KHR,
58 0,
59 EGL_CONSUMER_ACQUIRE_TIMEOUT_USEC_KHR,
60 0,
61 EGL_NONE,
62 };
63 stream_ = eglCreateStreamKHR(egl_display, stream_attributes);
64 RETURN_ON_FAILURE(!!stream_, "Could not create stream", false);
65 gl::ScopedActiveTexture texture0(GL_TEXTURE0);
66 gl::ScopedTextureBinder texture0_binder(
67 GL_TEXTURE_EXTERNAL_OES, picture_buffer_.service_texture_ids()[0]);
68 gl::ScopedActiveTexture texture1(GL_TEXTURE1);
69 gl::ScopedTextureBinder texture1_binder(
70 GL_TEXTURE_EXTERNAL_OES, picture_buffer_.service_texture_ids()[1]);
71
72 EGLAttrib consumer_attributes[] = {
73 EGL_COLOR_BUFFER_TYPE,
74 EGL_YUV_BUFFER_EXT,
75 EGL_YUV_NUMBER_OF_PLANES_EXT,
76 2,
77 EGL_YUV_PLANE0_TEXTURE_UNIT_NV,
78 0,
79 EGL_YUV_PLANE1_TEXTURE_UNIT_NV,
80 1,
81 EGL_NONE,
82 };
83 EGLBoolean result = eglStreamConsumerGLTextureExternalAttribsNV(
84 egl_display, stream_, consumer_attributes);
85 RETURN_ON_FAILURE(result, "Could not set stream consumer", false);
86
87 EGLAttrib producer_attributes[] = {
88 EGL_NONE,
89 };
90
91 result = eglCreateStreamProducerD3DTextureNV12ANGLE(egl_display, stream_,
92 producer_attributes);
93
94 EGLAttrib frame_attributes[] = {
95 EGL_D3D_TEXTURE_SUBRESOURCE_ID_ANGLE, level_, EGL_NONE,
96 };
97
98 result = eglStreamPostD3DTextureNV12ANGLE(egl_display, stream_,
99 static_cast<void*>(texture.get()),
100 frame_attributes);
101 RETURN_ON_FAILURE(result, "Could not post texture", false);
102 result = eglStreamConsumerAcquireKHR(egl_display, stream_);
103 RETURN_ON_FAILURE(result, "Could not post acquire stream", false);
104 return true;
105 }
106
107 class D3D11H264Picture : public H264Picture {
108 public:
109 D3D11H264Picture(D3D11PictureBuffer* picture, size_t input_buffer_id)
110 : picture(picture),
111 level_(picture->level()),
112 input_buffer_id_(input_buffer_id) {}
113
114 D3D11PictureBuffer* picture;
115 size_t level_;
116 size_t input_buffer_id_;
117
118 protected:
119 ~D3D11H264Picture() override;
120 };
121
122 D3D11H264Accelerator::D3D11H264Accelerator(
123 D3D11VideoDecoderClient* client,
124 base::win::ScopedComPtr<ID3D11VideoDecoder> video_decoder,
125 base::win::ScopedComPtr<ID3D11VideoDevice> video_device,
126 base::win::ScopedComPtr<ID3D11VideoContext> video_context)
127 : client_(client),
128 video_decoder_(video_decoder),
129 video_device_(video_device),
130 video_context_(video_context) {}
131
132 D3D11H264Accelerator::~D3D11H264Accelerator() {}
133
134 scoped_refptr<H264Picture> D3D11H264Accelerator::CreateH264Picture() {
135 D3D11PictureBuffer* picture = client_->GetPicture();
136 if (!picture) {
137 return nullptr;
138 }
139 picture->set_in_picture_use(true);
140 return make_scoped_refptr(
141 new D3D11H264Picture(picture, client_->input_buffer_id()));
142 }
143
// Starts a new decode frame and snapshots the per-frame DXVA state: the
// active SPS plus the reference-frame arrays derived from the DPB.  These
// staged members are copied into DXVA_PicParams_H264 later, in SubmitSlice.
// The list parameters are unused here; reference handling is driven
// entirely by |dpb|.
bool D3D11H264Accelerator::SubmitFrameMetadata(
    const H264SPS* sps,
    const H264PPS* pps,
    const H264DPB& dpb,
    const H264Picture::Vector& ref_pic_listp0,
    const H264Picture::Vector& ref_pic_listb0,
    const H264Picture::Vector& ref_pic_listb1,
    const scoped_refptr<H264Picture>& pic) {
  scoped_refptr<D3D11H264Picture> our_pic(
      static_cast<D3D11H264Picture*>(pic.get()));

  // Begin decoding into this picture's output view.
  HRESULT hr;
  hr = video_context_->DecoderBeginFrame(
      video_decoder_.get(), our_pic->picture->output_view_.get(), 0, nullptr);
  CHECK(SUCCEEDED(hr));

  sps_ = *sps;
  // Reset all 16 reference slots.  bPicEntry = 0xFF marks a slot as unused
  // (DXVA convention — confirm against the DXVA H.264 spec).
  for (size_t i = 0; i < 16; i++) {
    ref_frame_list_[i].bPicEntry = 0xFF;
    field_order_cnt_list_[i][0] = 0;
    field_order_cnt_list_[i][1] = 0;
    frame_num_list_[i] = 0;
  }
  used_for_reference_flags_ = 0;
  non_existing_frame_flags_ = 0;

  int i = 0;

  // Fill one slot per DPB entry; non-reference entries leave their slot
  // marked unused but still consume an index, keeping slot positions
  // aligned with DPB positions.
  for (auto it = dpb.begin(); it != dpb.end(); it++) {
    scoped_refptr<D3D11H264Picture> our_ref_pic(
        static_cast<D3D11H264Picture*>(it->get()));
    if (!our_ref_pic->ref) {
      i++;
      continue;
    }
    // Index7Bits addresses the texture-array slice holding the reference.
    ref_frame_list_[i].Index7Bits = our_ref_pic->level_;
    // AssociatedFlag distinguishes long-term from short-term references.
    ref_frame_list_[i].AssociatedFlag = our_ref_pic->long_term;
    field_order_cnt_list_[i][0] = our_ref_pic->top_field_order_cnt;
    field_order_cnt_list_[i][1] = our_ref_pic->bottom_field_order_cnt;
    frame_num_list_[i] = ref_frame_list_[i].AssociatedFlag
                             ? our_ref_pic->long_term_pic_num
                             : our_ref_pic->pic_num;
    // Two bits per slot; 3 marks both fields as used for reference.
    int ref = 3;
    used_for_reference_flags_ |= ref << (2 * i);
    non_existing_frame_flags_ |= (our_ref_pic->nonexisting) << i;
    i++;
  }
  // Fresh frame: no slices queued yet; grab a bitstream buffer to fill.
  slice_info_.clear();
  RetrieveBitstreamBuffer();
  return true;
}
195
196 void D3D11H264Accelerator::RetrieveBitstreamBuffer() {
197 current_offset_ = 0;
198 void* buffer;
199 UINT buffer_size;
200 HRESULT hr = video_context_->GetDecoderBuffer(
201 video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &buffer_size,
202 &buffer);
203 bitstream_buffer_bytes_ = (uint8_t*)buffer;
204 bitstream_buffer_size_ = buffer_size;
205 CHECK(SUCCEEDED(hr));
206 }
207
208 bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
209 const H264SliceHeader* slice_hdr,
210 const H264Picture::Vector& ref_pic_list0,
211 const H264Picture::Vector& ref_pic_list1,
212 const scoped_refptr<H264Picture>& pic,
213 const uint8_t* data,
214 size_t size) {
215 scoped_refptr<D3D11H264Picture> our_pic(
216 static_cast<D3D11H264Picture*>(pic.get()));
217
218 DXVA_PicParams_H264 pic_param = {};
219
220 #define FROM_SPS_TO_PP(a) pic_param.a = sps_.a
221 #define FROM_SPS_TO_PP2(a, b) pic_param.a = sps_.b
222 #define FROM_PPS_TO_PP(a) pic_param.a = pps->a
223 #define FROM_PPS_TO_PP2(a, b) pic_param.a = pps->b
224 #define FROM_SLICE_TO_PP(a) pic_param.a = slice_hdr->a
225 #define FROM_SLICE_TO_PP2(a, b) pic_param.a = slice_hdr->b
226 FROM_SPS_TO_PP2(wFrameWidthInMbsMinus1, pic_width_in_mbs_minus1);
227 FROM_SPS_TO_PP2(wFrameHeightInMbsMinus1, pic_height_in_map_units_minus1);
228 pic_param.CurrPic.Index7Bits = our_pic->level_;
229 // UNUSED: pic_param.CurrPic.AssociatedFlag = slide_hdr->field_pic_flag
230 FROM_SPS_TO_PP2(num_ref_frames, max_num_ref_frames);
231
232 FROM_SLICE_TO_PP(field_pic_flag);
233 pic_param.MbaffFrameFlag =
234 sps_.mb_adaptive_frame_field_flag && pic_param.field_pic_flag;
235 FROM_SPS_TO_PP2(residual_colour_transform_flag, separate_colour_plane_flag);
236 FROM_SLICE_TO_PP(sp_for_switch_flag);
237 FROM_SPS_TO_PP(chroma_format_idc);
238 pic_param.RefPicFlag = pic->ref;
239 FROM_PPS_TO_PP(constrained_intra_pred_flag);
240 FROM_PPS_TO_PP(weighted_pred_flag);
241 FROM_PPS_TO_PP(weighted_bipred_idc);
242 pic_param.MbsConsecutiveFlag = 1;
243 FROM_SPS_TO_PP(frame_mbs_only_flag);
244 FROM_PPS_TO_PP(transform_8x8_mode_flag);
245 // UNUSED: Minlumabipredsize
246 // UNUSED: pic_param.IntraPicFlag = slice_hdr->IsISlice();
247 FROM_SPS_TO_PP(bit_depth_luma_minus8);
248 FROM_SPS_TO_PP(bit_depth_chroma_minus8);
249 memcpy(pic_param.RefFrameList, ref_frame_list_,
250 sizeof pic_param.RefFrameList);
251 if (pic_param.field_pic_flag && pic_param.CurrPic.AssociatedFlag) {
252 pic_param.CurrFieldOrderCnt[1] = pic->bottom_field_order_cnt;
253 pic_param.CurrFieldOrderCnt[0] = 0;
254 } else if (pic_param.field_pic_flag && !pic_param.CurrPic.AssociatedFlag) {
255 pic_param.CurrFieldOrderCnt[0] = pic->top_field_order_cnt;
256 pic_param.CurrFieldOrderCnt[1] = 0;
257 } else {
258 pic_param.CurrFieldOrderCnt[0] = pic->top_field_order_cnt;
259 pic_param.CurrFieldOrderCnt[1] = pic->bottom_field_order_cnt;
260 }
261 memcpy(pic_param.FieldOrderCntList, field_order_cnt_list_,
262 sizeof pic_param.FieldOrderCntList);
263 FROM_PPS_TO_PP(pic_init_qs_minus26);
264 FROM_PPS_TO_PP(chroma_qp_index_offset);
265 FROM_PPS_TO_PP(second_chroma_qp_index_offset);
266 pic_param.ContinuationFlag = 1;
267 FROM_PPS_TO_PP(pic_init_qp_minus26);
268 FROM_PPS_TO_PP2(num_ref_idx_l0_active_minus1,
269 num_ref_idx_l0_default_active_minus1);
270 FROM_PPS_TO_PP2(num_ref_idx_l1_active_minus1,
271 num_ref_idx_l1_default_active_minus1);
272 // UNUSED: Reserved8BitsA
273 memcpy(pic_param.FrameNumList, frame_num_list_,
274 sizeof pic_param.FrameNumList);
275 pic_param.UsedForReferenceFlags = used_for_reference_flags_;
276 pic_param.NonExistingFrameFlags = non_existing_frame_flags_;
277 pic_param.frame_num = pic->frame_num;
278 FROM_SPS_TO_PP(log2_max_frame_num_minus4);
279 FROM_SPS_TO_PP(pic_order_cnt_type);
280 FROM_SPS_TO_PP(log2_max_pic_order_cnt_lsb_minus4);
281 FROM_SPS_TO_PP(delta_pic_order_always_zero_flag);
282 FROM_SPS_TO_PP(direct_8x8_inference_flag);
283 FROM_PPS_TO_PP(entropy_coding_mode_flag);
284 FROM_PPS_TO_PP2(pic_order_present_flag,
285 bottom_field_pic_order_in_frame_present_flag);
286 FROM_PPS_TO_PP(num_slice_groups_minus1);
287 CHECK_EQ(0u, pic_param.num_slice_groups_minus1);
288 // UNUSED: slice_group_map_type
289 FROM_PPS_TO_PP(deblocking_filter_control_present_flag);
290 FROM_PPS_TO_PP(redundant_pic_cnt_present_flag);
291 // UNUSED: Reserved8BitsB
292 // UNUSED: slice_group_change_rate
293 //
294 //
295 //
296
297 pic_param.StatusReportFeedbackNumber = 1;
298
299 UINT buffer_size;
300 void* buffer;
301 HRESULT hr = video_context_->GetDecoderBuffer(
302 video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS,
303 &buffer_size, &buffer);
304 CHECK(SUCCEEDED(hr));
305
306 memcpy(buffer, &pic_param, sizeof(pic_param));
307 hr = video_context_->ReleaseDecoderBuffer(
308 video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS);
309 CHECK(SUCCEEDED(hr));
310
311 DXVA_Qmatrix_H264 iq_matrix_buf = {};
312
313 if (pps->pic_scaling_matrix_present_flag) {
314 for (int i = 0; i < 6; ++i) {
315 for (int j = 0; j < 16; ++j)
316 iq_matrix_buf.bScalingLists4x4[i][j] = pps->scaling_list4x4[i][j];
317 }
318
319 for (int i = 0; i < 2; ++i) {
320 for (int j = 0; j < 64; ++j)
321 iq_matrix_buf.bScalingLists8x8[i][j] = pps->scaling_list8x8[i][j];
322 }
323 } else {
324 for (int i = 0; i < 6; ++i) {
325 for (int j = 0; j < 16; ++j)
326 iq_matrix_buf.bScalingLists4x4[i][j] = sps_.scaling_list4x4[i][j];
327 }
328
329 for (int i = 0; i < 2; ++i) {
330 for (int j = 0; j < 64; ++j)
331 iq_matrix_buf.bScalingLists8x8[i][j] = sps_.scaling_list8x8[i][j];
332 }
333 }
334 hr = video_context_->GetDecoderBuffer(
335 video_decoder_.get(),
336 D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX, &buffer_size,
337 &buffer);
338 CHECK(SUCCEEDED(hr));
339 memcpy(buffer, &iq_matrix_buf, sizeof(iq_matrix_buf));
340 hr = video_context_->ReleaseDecoderBuffer(
341 video_decoder_.get(),
342 D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX);
343
344 // Ideally all slices in a frame are put in the same bitstream buffer.
345 // However the bitstream buffer may not fit all the data, so split on the
346 // necessary boundaries.
347
348 size_t out_bitstream_size = size + 3;
349
350 size_t remaining_bitstream = out_bitstream_size;
351 size_t start_location = 0;
352
353 while (remaining_bitstream > 0) {
354 if (bitstream_buffer_size_ < remaining_bitstream &&
355 slice_info_.size() > 0) {
356 SubmitSliceData();
357 RetrieveBitstreamBuffer();
358 }
359
360 size_t bytes_to_copy = remaining_bitstream;
361 bool contains_end = true;
362 if (bytes_to_copy > bitstream_buffer_size_) {
363 bytes_to_copy = bitstream_buffer_size_;
364 contains_end = false;
365 }
366 size_t real_bytes_to_copy = bytes_to_copy;
367 // TODO(jbauman): fix hack
368 uint8_t* out_start = bitstream_buffer_bytes_;
369 if (bytes_to_copy >= 3 && start_location == 0) {
370 *(out_start++) = 0;
371 *(out_start++) = 0;
372 *(out_start++) = 1;
373 real_bytes_to_copy -= 3;
374 }
375 memcpy(out_start, data + start_location, real_bytes_to_copy);
376
377 DXVA_Slice_H264_Short slice_info = {};
378 slice_info.BSNALunitDataLocation = (UINT)current_offset_;
379 slice_info.SliceBytesInBuffer = (UINT)bytes_to_copy;
380 if (contains_end && start_location == 0)
381 slice_info.wBadSliceChopping = 0;
382 else if (!contains_end && start_location == 0)
383 slice_info.wBadSliceChopping = 1;
384 else if (contains_end && start_location != 0)
385 slice_info.wBadSliceChopping = 2;
386 else
387 slice_info.wBadSliceChopping = 3;
388
389 slice_info_.push_back(slice_info);
390 bitstream_buffer_size_ -= bytes_to_copy;
391 current_offset_ += bytes_to_copy;
392 start_location += bytes_to_copy;
393 remaining_bitstream -= bytes_to_copy;
394 bitstream_buffer_bytes_ += bytes_to_copy;
395 }
396
397 return true;
398 }
399
400 void D3D11H264Accelerator::SubmitSliceData() {
401 CHECK(slice_info_.size() > 0);
402 UINT buffer_size;
403 void* buffer;
404 HRESULT hr = video_context_->GetDecoderBuffer(
405 video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL,
406 &buffer_size, &buffer);
407 CHECK(SUCCEEDED(hr));
408 CHECK_LE(sizeof(slice_info_[0]) * slice_info_.size(), buffer_size);
409 memcpy(buffer, &slice_info_[0], sizeof(slice_info_[0]) * slice_info_.size());
410 hr = video_context_->ReleaseDecoderBuffer(
411 video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL);
412
413 hr = video_context_->ReleaseDecoderBuffer(
414 video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_BITSTREAM);
415 D3D11_VIDEO_DECODER_BUFFER_DESC buffers[4] = {};
416 buffers[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
417 buffers[0].DataOffset = 0;
418 buffers[0].DataSize = sizeof(DXVA_PicParams_H264);
419 buffers[1].BufferType =
420 D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX;
421 buffers[1].DataOffset = 0;
422 buffers[1].DataSize = sizeof(DXVA_Qmatrix_H264);
423 buffers[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
424 buffers[2].DataOffset = 0;
425 buffers[2].DataSize = (UINT)(sizeof(slice_info_[0]) * slice_info_.size());
426 buffers[3].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
427 buffers[3].DataOffset = 0;
428 buffers[3].DataSize = (UINT)current_offset_;
429
430 hr = video_context_->SubmitDecoderBuffers(video_decoder_.get(), 4, buffers);
431 current_offset_ = 0;
432 slice_info_.clear();
433 }
434
435 bool D3D11H264Accelerator::SubmitDecode(const scoped_refptr<H264Picture>& pic) {
436 SubmitSliceData();
437
438 HRESULT hr = video_context_->DecoderEndFrame(video_decoder_.get());
439 CHECK(SUCCEEDED(hr));
440
441 return true;
442 }
443
444 bool D3D11H264Accelerator::OutputPicture(
445 const scoped_refptr<H264Picture>& pic) {
446 scoped_refptr<D3D11H264Picture> our_pic(
447 static_cast<D3D11H264Picture*>(pic.get()));
448 client_->OutputResult(our_pic->picture, our_pic->input_buffer_id_);
449 return true;
450 }
451
// Returns the backing picture buffer to the "free" state once nothing
// references this picture any longer (see set_in_picture_use(true) in
// CreateH264Picture).
D3D11H264Picture::~D3D11H264Picture() {
  picture->set_in_picture_use(false);
}
455
456 } // namespace media
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698