| OLD | NEW |
| (Empty) | |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "media/gpu/d3d11_h264_accelerator.h" |
| 6 |
| 7 #include <d3d11.h> |
| 8 #include <dxva.h> |
| 9 #include <windows.h> |
| 10 |
| 11 #include "base/memory/ptr_util.h" |
| 12 #include "base/trace_event/trace_event.h" |
| 13 #include "base/win/scoped_comptr.h" |
| 14 #include "media/gpu/h264_decoder.h" |
| 15 #include "media/gpu/h264_dpb.h" |
| 16 #include "third_party/angle/include/EGL/egl.h" |
| 17 #include "third_party/angle/include/EGL/eglext.h" |
| 18 #include "ui/gfx/color_space.h" |
| 19 #include "ui/gl/gl_bindings.h" |
| 20 #include "ui/gl/gl_context.h" |
| 21 #include "ui/gl/gl_surface_egl.h" |
| 22 #include "ui/gl/scoped_binders.h" |
| 23 |
| 24 namespace media { |
| 25 |
// Logs |log| and returns |ret| from the enclosing function when |result|
// evaluates false. The do/while(0) wrapper makes the macro behave as a
// single statement (safe in unbraced if/else).
#define RETURN_ON_FAILURE(result, log, ret) \
  do {                                      \
    if (!(result)) {                        \
      DLOG(ERROR) << log;                   \
      return ret;                           \
    }                                       \
  } while (0)
| 33 |
// Associates a client PictureBuffer with |level|, the array slice of the
// decoder's output Texture2D array that this buffer will decode into.
D3D11PictureBuffer::D3D11PictureBuffer(PictureBuffer picture_buffer,
                                       size_t level)
    : picture_buffer_(picture_buffer), level_(level) {}

D3D11PictureBuffer::~D3D11PictureBuffer() {}
| 39 |
| 40 bool D3D11PictureBuffer::Init( |
| 41 base::win::ScopedComPtr<ID3D11VideoDevice> video_device, |
| 42 base::win::ScopedComPtr<ID3D11Texture2D> texture, |
| 43 const GUID& decoder_guid) { |
| 44 texture_ = texture; |
| 45 D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC view_desc = {}; |
| 46 view_desc.DecodeProfile = decoder_guid; |
| 47 view_desc.ViewDimension = D3D11_VDOV_DIMENSION_TEXTURE2D; |
| 48 view_desc.Texture2D.ArraySlice = (UINT)level_; |
| 49 |
| 50 HRESULT hr = video_device->CreateVideoDecoderOutputView( |
| 51 texture.get(), &view_desc, output_view_.Receive()); |
| 52 |
| 53 CHECK(SUCCEEDED(hr)); |
| 54 EGLDisplay egl_display = gl::GLSurfaceEGL::GetHardwareDisplay(); |
| 55 const EGLint stream_attributes[] = { |
| 56 EGL_CONSUMER_LATENCY_USEC_KHR, |
| 57 0, |
| 58 EGL_CONSUMER_ACQUIRE_TIMEOUT_USEC_KHR, |
| 59 0, |
| 60 EGL_NONE, |
| 61 }; |
| 62 stream_ = eglCreateStreamKHR(egl_display, stream_attributes); |
| 63 RETURN_ON_FAILURE(!!stream_, "Could not create stream", false); |
| 64 gl::ScopedActiveTexture texture0(GL_TEXTURE0); |
| 65 gl::ScopedTextureBinder texture0_binder( |
| 66 GL_TEXTURE_EXTERNAL_OES, picture_buffer_.service_texture_ids()[0]); |
| 67 gl::ScopedActiveTexture texture1(GL_TEXTURE1); |
| 68 gl::ScopedTextureBinder texture1_binder( |
| 69 GL_TEXTURE_EXTERNAL_OES, picture_buffer_.service_texture_ids()[1]); |
| 70 |
| 71 EGLAttrib consumer_attributes[] = { |
| 72 EGL_COLOR_BUFFER_TYPE, |
| 73 EGL_YUV_BUFFER_EXT, |
| 74 EGL_YUV_NUMBER_OF_PLANES_EXT, |
| 75 2, |
| 76 EGL_YUV_PLANE0_TEXTURE_UNIT_NV, |
| 77 0, |
| 78 EGL_YUV_PLANE1_TEXTURE_UNIT_NV, |
| 79 1, |
| 80 EGL_NONE, |
| 81 }; |
| 82 EGLBoolean result = eglStreamConsumerGLTextureExternalAttribsNV( |
| 83 egl_display, stream_, consumer_attributes); |
| 84 RETURN_ON_FAILURE(result, "Could not set stream consumer", false); |
| 85 |
| 86 EGLAttrib producer_attributes[] = { |
| 87 EGL_NONE, |
| 88 }; |
| 89 |
| 90 result = eglCreateStreamProducerD3DTextureNV12ANGLE(egl_display, stream_, |
| 91 producer_attributes); |
| 92 |
| 93 EGLAttrib frame_attributes[] = { |
| 94 EGL_D3D_TEXTURE_SUBRESOURCE_ID_ANGLE, level_, EGL_NONE, |
| 95 }; |
| 96 |
| 97 result = eglStreamPostD3DTextureNV12ANGLE(egl_display, stream_, |
| 98 static_cast<void*>(texture.get()), |
| 99 frame_attributes); |
| 100 RETURN_ON_FAILURE(result, "Could not post texture", false); |
| 101 result = eglStreamConsumerAcquireKHR(egl_display, stream_); |
| 102 RETURN_ON_FAILURE(result, "Could not post acquire stream", false); |
| 103 return true; |
| 104 } |
| 105 |
// H264Picture subclass that records which D3D11PictureBuffer (and which
// texture array slice) the picture is decoded into, plus the id of the
// input buffer it originated from so decoded output can be correlated with
// its input. The destructor returns the buffer to the not-in-use state.
class D3D11H264Picture : public H264Picture {
 public:
  D3D11H264Picture(D3D11PictureBuffer* picture, size_t input_buffer_id)
      : picture(picture),
        level_(picture->level()),
        input_buffer_id_(input_buffer_id) {}

  // Not deleted here; the destructor only clears the in-picture-use flag.
  D3D11PictureBuffer* picture;
  // Texture array slice backing this picture (cached from |picture|).
  size_t level_;
  // Client-supplied id of the bitstream input buffer for this picture.
  size_t input_buffer_id_;

 protected:
  ~D3D11H264Picture() override;
};
| 120 |
// |client| supplies picture buffers and receives decoded output; it must
// outlive this accelerator. The decoder/device/context arguments are
// ref-counted COM pointers copied into members.
D3D11H264Accelerator::D3D11H264Accelerator(
    D3D11VideoDecoderClient* client,
    base::win::ScopedComPtr<ID3D11VideoDecoder> video_decoder,
    base::win::ScopedComPtr<ID3D11VideoDevice> video_device,
    base::win::ScopedComPtr<ID3D11VideoContext> video_context)
    : client_(client),
      video_decoder_(video_decoder),
      video_device_(video_device),
      video_context_(video_context) {}

D3D11H264Accelerator::~D3D11H264Accelerator() {}
| 132 |
| 133 scoped_refptr<H264Picture> D3D11H264Accelerator::CreateH264Picture() { |
| 134 D3D11PictureBuffer* picture = client_->GetPicture(); |
| 135 if (!picture) { |
| 136 return nullptr; |
| 137 } |
| 138 picture->set_in_picture_use(true); |
| 139 return make_scoped_refptr( |
| 140 new D3D11H264Picture(picture, client_->input_buffer_id())); |
| 141 } |
| 142 |
// Begins a decoder frame targeting |pic|'s output view and snapshots the
// per-frame DXVA reference state (ref_frame_list_, field_order_cnt_list_,
// frame_num_list_, used-for-reference and non-existing flags) from the
// current DPB. SubmitSlice() consumes this state when building the DXVA
// picture parameters. Also maps the first bitstream buffer for the slices
// that will follow.
bool D3D11H264Accelerator::SubmitFrameMetadata(
    const H264SPS* sps,
    const H264PPS* pps,
    const H264DPB& dpb,
    const H264Picture::Vector& ref_pic_listp0,
    const H264Picture::Vector& ref_pic_listb0,
    const H264Picture::Vector& ref_pic_listb1,
    const scoped_refptr<H264Picture>& pic) {
  scoped_refptr<D3D11H264Picture> our_pic(
      static_cast<D3D11H264Picture*>(pic.get()));

  HRESULT hr;
  hr = video_context_->DecoderBeginFrame(
      video_decoder_.get(), our_pic->picture->output_view_.get(), 0, nullptr);
  CHECK(SUCCEEDED(hr));

  // Keep a copy of the SPS; SubmitSlice() reads it via the FROM_SPS_TO_PP
  // macros when packing DXVA_PicParams_H264.
  sps_ = *sps;
  // Reset all 16 DXVA reference entries to "unused" (bPicEntry == 0xFF).
  for (size_t i = 0; i < 16; i++) {
    ref_frame_list_[i].bPicEntry = 0xFF;
    field_order_cnt_list_[i][0] = 0;
    field_order_cnt_list_[i][1] = 0;
    frame_num_list_[i] = 0;
  }
  used_for_reference_flags_ = 0;
  non_existing_frame_flags_ = 0;

  int i = 0;

  // Walk the DPB; each DPB slot keeps its index (i advances even for
  // non-reference pictures, which stay marked unused).
  for (auto it = dpb.begin(); it != dpb.end(); it++) {
    scoped_refptr<D3D11H264Picture> our_ref_pic(
        static_cast<D3D11H264Picture*>(it->get()));
    if (!our_ref_pic->ref) {
      i++;
      continue;
    }
    ref_frame_list_[i].Index7Bits = our_ref_pic->level_;
    // AssociatedFlag distinguishes long-term from short-term references.
    ref_frame_list_[i].AssociatedFlag = our_ref_pic->long_term;
    field_order_cnt_list_[i][0] = our_ref_pic->top_field_order_cnt;
    field_order_cnt_list_[i][1] = our_ref_pic->bottom_field_order_cnt;
    // Long-term references report their long-term pic num; short-term ones
    // report pic_num.
    frame_num_list_[i] = ref_frame_list_[i].AssociatedFlag
                             ? our_ref_pic->long_term_pic_num
                             : our_ref_pic->pic_num;
    // 3 == both fields of this entry are used for reference.
    int ref = 3;
    used_for_reference_flags_ |= ref << (2 * i);
    non_existing_frame_flags_ |= (our_ref_pic->nonexisting) << i;
    i++;
  }
  slice_info_.clear();
  RetrieveBitstreamBuffer();
  return true;
}
| 194 |
| 195 void D3D11H264Accelerator::RetrieveBitstreamBuffer() { |
| 196 current_offset_ = 0; |
| 197 void* buffer; |
| 198 UINT buffer_size; |
| 199 HRESULT hr = video_context_->GetDecoderBuffer( |
| 200 video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &buffer_size, |
| 201 &buffer); |
| 202 bitstream_buffer_bytes_ = (uint8_t*)buffer; |
| 203 bitstream_buffer_size_ = buffer_size; |
| 204 CHECK(SUCCEEDED(hr)); |
| 205 } |
| 206 |
// Packs the DXVA picture parameters (from the cached SPS, |pps| and
// |slice_hdr|) and the inverse-quantization matrices into their decoder
// buffers, then appends the slice NALU to the bitstream buffer with an
// Annex-B start code (00 00 01) prepended. If the slice does not fit in
// the current bitstream buffer, the already-queued slices are flushed via
// SubmitSliceData() and/or the slice is split across buffers, with
// wBadSliceChopping recording which fragment this is. Slice control
// entries accumulate in slice_info_ until SubmitSliceData() runs.
bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
                                       const H264SliceHeader* slice_hdr,
                                       const H264Picture::Vector& ref_pic_list0,
                                       const H264Picture::Vector& ref_pic_list1,
                                       const scoped_refptr<H264Picture>& pic,
                                       const uint8_t* data,
                                       size_t size) {
  scoped_refptr<D3D11H264Picture> our_pic(
      static_cast<D3D11H264Picture*>(pic.get()));

  DXVA_PicParams_H264 pic_param = {};

// Copy helpers: FROM_X_TO_PP copies a same-named field; FROM_X_TO_PP2
// copies source field |b| into pic_param field |a|.
#define FROM_SPS_TO_PP(a) pic_param.a = sps_.a
#define FROM_SPS_TO_PP2(a, b) pic_param.a = sps_.b
#define FROM_PPS_TO_PP(a) pic_param.a = pps->a
#define FROM_PPS_TO_PP2(a, b) pic_param.a = pps->b
#define FROM_SLICE_TO_PP(a) pic_param.a = slice_hdr->a
#define FROM_SLICE_TO_PP2(a, b) pic_param.a = slice_hdr->b
  FROM_SPS_TO_PP2(wFrameWidthInMbsMinus1, pic_width_in_mbs_minus1);
  FROM_SPS_TO_PP2(wFrameHeightInMbsMinus1, pic_height_in_map_units_minus1);
  pic_param.CurrPic.Index7Bits = our_pic->level_;
  // UNUSED: pic_param.CurrPic.AssociatedFlag = slide_hdr->field_pic_flag
  FROM_SPS_TO_PP2(num_ref_frames, max_num_ref_frames);

  FROM_SLICE_TO_PP(field_pic_flag);
  pic_param.MbaffFrameFlag =
      sps_.mb_adaptive_frame_field_flag && pic_param.field_pic_flag;
  FROM_SPS_TO_PP2(residual_colour_transform_flag, separate_colour_plane_flag);
  FROM_SLICE_TO_PP(sp_for_switch_flag);
  FROM_SPS_TO_PP(chroma_format_idc);
  pic_param.RefPicFlag = pic->ref;
  FROM_PPS_TO_PP(constrained_intra_pred_flag);
  FROM_PPS_TO_PP(weighted_pred_flag);
  FROM_PPS_TO_PP(weighted_bipred_idc);
  pic_param.MbsConsecutiveFlag = 1;
  FROM_SPS_TO_PP(frame_mbs_only_flag);
  FROM_PPS_TO_PP(transform_8x8_mode_flag);
  // UNUSED: Minlumabipredsize
  // UNUSED: pic_param.IntraPicFlag = slice_hdr->IsISlice();
  FROM_SPS_TO_PP(bit_depth_luma_minus8);
  FROM_SPS_TO_PP(bit_depth_chroma_minus8);
  // Reference state captured earlier by SubmitFrameMetadata().
  memcpy(pic_param.RefFrameList, ref_frame_list_,
         sizeof pic_param.RefFrameList);
  // AssociatedFlag on CurrPic is never set above, so only the final branch
  // (frame picture: both field order counts) is currently reachable.
  if (pic_param.field_pic_flag && pic_param.CurrPic.AssociatedFlag) {
    pic_param.CurrFieldOrderCnt[1] = pic->bottom_field_order_cnt;
    pic_param.CurrFieldOrderCnt[0] = 0;
  } else if (pic_param.field_pic_flag && !pic_param.CurrPic.AssociatedFlag) {
    pic_param.CurrFieldOrderCnt[0] = pic->top_field_order_cnt;
    pic_param.CurrFieldOrderCnt[1] = 0;
  } else {
    pic_param.CurrFieldOrderCnt[0] = pic->top_field_order_cnt;
    pic_param.CurrFieldOrderCnt[1] = pic->bottom_field_order_cnt;
  }
  memcpy(pic_param.FieldOrderCntList, field_order_cnt_list_,
         sizeof pic_param.FieldOrderCntList);
  FROM_PPS_TO_PP(pic_init_qs_minus26);
  FROM_PPS_TO_PP(chroma_qp_index_offset);
  FROM_PPS_TO_PP(second_chroma_qp_index_offset);
  pic_param.ContinuationFlag = 1;
  FROM_PPS_TO_PP(pic_init_qp_minus26);
  FROM_PPS_TO_PP2(num_ref_idx_l0_active_minus1,
                  num_ref_idx_l0_default_active_minus1);
  FROM_PPS_TO_PP2(num_ref_idx_l1_active_minus1,
                  num_ref_idx_l1_default_active_minus1);
  // UNUSED: Reserved8BitsA
  memcpy(pic_param.FrameNumList, frame_num_list_,
         sizeof pic_param.FrameNumList);
  pic_param.UsedForReferenceFlags = used_for_reference_flags_;
  pic_param.NonExistingFrameFlags = non_existing_frame_flags_;
  pic_param.frame_num = pic->frame_num;
  FROM_SPS_TO_PP(log2_max_frame_num_minus4);
  FROM_SPS_TO_PP(pic_order_cnt_type);
  FROM_SPS_TO_PP(log2_max_pic_order_cnt_lsb_minus4);
  FROM_SPS_TO_PP(delta_pic_order_always_zero_flag);
  FROM_SPS_TO_PP(direct_8x8_inference_flag);
  FROM_PPS_TO_PP(entropy_coding_mode_flag);
  FROM_PPS_TO_PP2(pic_order_present_flag,
                  bottom_field_pic_order_in_frame_present_flag);
  FROM_PPS_TO_PP(num_slice_groups_minus1);
  // Slice groups (FMO) are not supported by this path.
  CHECK_EQ(0u, pic_param.num_slice_groups_minus1);
  // UNUSED: slice_group_map_type
  FROM_PPS_TO_PP(deblocking_filter_control_present_flag);
  FROM_PPS_TO_PP(redundant_pic_cnt_present_flag);
  // UNUSED: Reserved8BitsB
  // UNUSED: slice_group_change_rate

  pic_param.StatusReportFeedbackNumber = 1;

  // Copy the picture parameters into their decoder buffer.
  UINT buffer_size;
  void* buffer;
  HRESULT hr = video_context_->GetDecoderBuffer(
      video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS,
      &buffer_size, &buffer);
  CHECK(SUCCEEDED(hr));

  memcpy(buffer, &pic_param, sizeof(pic_param));
  hr = video_context_->ReleaseDecoderBuffer(
      video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS);
  CHECK(SUCCEEDED(hr));

  // Scaling lists come from the PPS if present there, otherwise the SPS.
  DXVA_Qmatrix_H264 iq_matrix_buf = {};

  if (pps->pic_scaling_matrix_present_flag) {
    for (int i = 0; i < 6; ++i) {
      for (int j = 0; j < 16; ++j)
        iq_matrix_buf.bScalingLists4x4[i][j] = pps->scaling_list4x4[i][j];
    }

    for (int i = 0; i < 2; ++i) {
      for (int j = 0; j < 64; ++j)
        iq_matrix_buf.bScalingLists8x8[i][j] = pps->scaling_list8x8[i][j];
    }
  } else {
    for (int i = 0; i < 6; ++i) {
      for (int j = 0; j < 16; ++j)
        iq_matrix_buf.bScalingLists4x4[i][j] = sps_.scaling_list4x4[i][j];
    }

    for (int i = 0; i < 2; ++i) {
      for (int j = 0; j < 64; ++j)
        iq_matrix_buf.bScalingLists8x8[i][j] = sps_.scaling_list8x8[i][j];
    }
  }
  hr = video_context_->GetDecoderBuffer(
      video_decoder_.get(),
      D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX, &buffer_size,
      &buffer);
  CHECK(SUCCEEDED(hr));
  memcpy(buffer, &iq_matrix_buf, sizeof(iq_matrix_buf));
  // NOTE(review): hr is not checked after this release, unlike the other
  // decoder-buffer calls in this file — confirm whether that is intended.
  hr = video_context_->ReleaseDecoderBuffer(
      video_decoder_.get(),
      D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX);

  // Ideally all slices in a frame are put in the same bitstream buffer.
  // However the bitstream buffer may not fit all the data, so split on the
  // necessary boundaries.

  // +3 accounts for the Annex-B start code (00 00 01) written below.
  size_t out_bitstream_size = size + 3;

  size_t remaining_bitstream = out_bitstream_size;
  size_t start_location = 0;

  while (remaining_bitstream > 0) {
    // If the current buffer can't hold the rest and already has slices in
    // it, flush them and map a fresh buffer.
    if (bitstream_buffer_size_ < remaining_bitstream &&
        slice_info_.size() > 0) {
      SubmitSliceData();
      RetrieveBitstreamBuffer();
    }

    size_t bytes_to_copy = remaining_bitstream;
    bool contains_end = true;
    if (bytes_to_copy > bitstream_buffer_size_) {
      bytes_to_copy = bitstream_buffer_size_;
      contains_end = false;
    }
    size_t real_bytes_to_copy = bytes_to_copy;
    // TODO(jbauman): fix hack
    uint8_t* out_start = bitstream_buffer_bytes_;
    // Prepend the start code on the first fragment of the slice.
    if (bytes_to_copy >= 3 && start_location == 0) {
      *(out_start++) = 0;
      *(out_start++) = 0;
      *(out_start++) = 1;
      real_bytes_to_copy -= 3;
    }
    memcpy(out_start, data + start_location, real_bytes_to_copy);

    // wBadSliceChopping: 0 = whole slice, 1 = start only, 2 = end only,
    // 3 = middle fragment (neither start nor end).
    DXVA_Slice_H264_Short slice_info = {};
    slice_info.BSNALunitDataLocation = (UINT)current_offset_;
    slice_info.SliceBytesInBuffer = (UINT)bytes_to_copy;
    if (contains_end && start_location == 0)
      slice_info.wBadSliceChopping = 0;
    else if (!contains_end && start_location == 0)
      slice_info.wBadSliceChopping = 1;
    else if (contains_end && start_location != 0)
      slice_info.wBadSliceChopping = 2;
    else
      slice_info.wBadSliceChopping = 3;

    slice_info_.push_back(slice_info);
    bitstream_buffer_size_ -= bytes_to_copy;
    current_offset_ += bytes_to_copy;
    start_location += bytes_to_copy;
    remaining_bitstream -= bytes_to_copy;
    bitstream_buffer_bytes_ += bytes_to_copy;
  }

  return true;
}
| 398 |
| 399 void D3D11H264Accelerator::SubmitSliceData() { |
| 400 CHECK(slice_info_.size() > 0); |
| 401 UINT buffer_size; |
| 402 void* buffer; |
| 403 HRESULT hr = video_context_->GetDecoderBuffer( |
| 404 video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL, |
| 405 &buffer_size, &buffer); |
| 406 CHECK(SUCCEEDED(hr)); |
| 407 CHECK_LE(sizeof(slice_info_[0]) * slice_info_.size(), buffer_size); |
| 408 memcpy(buffer, &slice_info_[0], sizeof(slice_info_[0]) * slice_info_.size()); |
| 409 hr = video_context_->ReleaseDecoderBuffer( |
| 410 video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL); |
| 411 |
| 412 hr = video_context_->ReleaseDecoderBuffer( |
| 413 video_decoder_.get(), D3D11_VIDEO_DECODER_BUFFER_BITSTREAM); |
| 414 D3D11_VIDEO_DECODER_BUFFER_DESC buffers[4] = {}; |
| 415 buffers[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS; |
| 416 buffers[0].DataOffset = 0; |
| 417 buffers[0].DataSize = sizeof(DXVA_PicParams_H264); |
| 418 buffers[1].BufferType = |
| 419 D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX; |
| 420 buffers[1].DataOffset = 0; |
| 421 buffers[1].DataSize = sizeof(DXVA_Qmatrix_H264); |
| 422 buffers[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL; |
| 423 buffers[2].DataOffset = 0; |
| 424 buffers[2].DataSize = (UINT)(sizeof(slice_info_[0]) * slice_info_.size()); |
| 425 buffers[3].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM; |
| 426 buffers[3].DataOffset = 0; |
| 427 buffers[3].DataSize = (UINT)current_offset_; |
| 428 |
| 429 hr = video_context_->SubmitDecoderBuffers(video_decoder_.get(), 4, buffers); |
| 430 current_offset_ = 0; |
| 431 slice_info_.clear(); |
| 432 } |
| 433 |
| 434 bool D3D11H264Accelerator::SubmitDecode(const scoped_refptr<H264Picture>& pic) { |
| 435 SubmitSliceData(); |
| 436 |
| 437 HRESULT hr = video_context_->DecoderEndFrame(video_decoder_.get()); |
| 438 CHECK(SUCCEEDED(hr)); |
| 439 |
| 440 return true; |
| 441 } |
| 442 |
| 443 bool D3D11H264Accelerator::OutputPicture( |
| 444 const scoped_refptr<H264Picture>& pic) { |
| 445 scoped_refptr<D3D11H264Picture> our_pic( |
| 446 static_cast<D3D11H264Picture*>(pic.get())); |
| 447 client_->OutputResult(our_pic->picture, our_pic->input_buffer_id_); |
| 448 return true; |
| 449 } |
| 450 |
// When the last reference to this picture is dropped, mark its backing
// picture buffer as no longer in use so the client can reuse it.
D3D11H264Picture::~D3D11H264Picture() {
  picture->set_in_picture_use(false);
}
| 454 |
| 455 } // namespace media |
| OLD | NEW |