// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/mf/mft_h264_decoder.h"

#include <algorithm>
#include <string>

#include <d3d9.h>
#include <evr.h>
#include <initguid.h>
#include <mfapi.h>
#include <mferror.h>
#include <mfidl.h>
#include <shlwapi.h>
#include <wmcodecdsp.h>

#include "base/callback.h"
#include "base/logging.h"
#include "base/scoped_comptr_win.h"
#include "media/base/video_frame.h"
#include "media/mf/file_reader_util.h"

#pragma comment(lib, "dxva2.lib")
#pragma comment(lib, "d3d9.lib")
#pragma comment(lib, "mfuuid.lib")
#pragma comment(lib, "evr.lib")
#pragma comment(lib, "mfplat.lib")

namespace media {

// Returns Media Foundation's H.264 decoder as an MFT, or NULL if it is not
// available (e.g. the host is not running Windows 7, which ships the decoder).
static IMFTransform* GetH264Decoder() {
  // Use __uuidof() to avoid linking to a library just for the CLSID.
  IMFTransform* dec;
  HRESULT hr = CoCreateInstance(__uuidof(CMSH264DecoderMFT), NULL,
                                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&dec));
  if (FAILED(hr)) {
    LOG(ERROR) << "CoCreateInstance failed " << std::hex << std::showbase << hr;
    return NULL;
  }
  return dec;
}
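
// COM and Media Foundation must already be initialized for the call above to
// succeed. A minimal sketch of that setup (an assumption about the embedding
// application, not something this file does itself):
//
//   CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
//   MFStartup(MF_VERSION, MFSTARTUP_FULL);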

// Creates an empty Media Foundation sample with no buffers.
static IMFSample* CreateEmptySample() {
  HRESULT hr;
  ScopedComPtr<IMFSample> sample;
  hr = MFCreateSample(sample.Receive());
  if (FAILED(hr)) {
    LOG(ERROR) << "Unable to create an empty sample";
    return NULL;
  }
  return sample.Detach();
}

// Creates a Media Foundation sample with one buffer of length |buffer_length|.
static IMFSample* CreateEmptySampleWithBuffer(int buffer_length) {
  CHECK_GT(buffer_length, 0);
  ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySample());
  if (sample.get() == NULL)
    return NULL;
  ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr;
  hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
  if (FAILED(hr)) {
    LOG(ERROR) << "Unable to create an empty buffer";
    return NULL;
  }
  hr = sample->AddBuffer(buffer.get());
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to add empty buffer to sample";
    return NULL;
  }
  return sample.Detach();
}
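
// If the decoder reported a nonzero cbAlignment in its stream info, an
// aligned buffer would be needed instead of a plain memory buffer. A hedged
// sketch using the standard MF helper (this file currently only logs a
// warning about alignment rather than doing this):
//
//   MFCreateAlignedMemoryBuffer(buffer_length, MF_16_BYTE_ALIGNMENT,
//                               buffer.Receive());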

// Creates a Media Foundation sample with one buffer containing a copy of the
// given Annex B stream data.
// |timestamp| and |duration| are given in 100ns units; provide 0 if they are
// not known. |min_size| specifies the minimum size of the buffer, which might
// be required by the decoder for input.
static IMFSample* CreateInputSample(uint8* stream, int size,
                                    int64 timestamp, int64 duration,
                                    int min_size) {
  CHECK(stream != NULL);
  CHECK_GT(size, 0);
  ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size)));
  if (sample.get() == NULL) {
    LOG(ERROR) << "Failed to create empty buffer for input";
    return NULL;
  }
  HRESULT hr;
  if (duration > 0) {
    hr = sample->SetSampleDuration(duration);
    if (FAILED(hr)) {
      LOG(ERROR) << "Failed to set sample duration";
      return NULL;
    }
  }
  if (timestamp > 0) {
    hr = sample->SetSampleTime(timestamp);
    if (FAILED(hr)) {
      LOG(ERROR) << "Failed to set sample time";
      return NULL;
    }
  }
  ScopedComPtr<IMFMediaBuffer> buffer;
  hr = sample->GetBufferByIndex(0, buffer.Receive());
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to get buffer in sample";
    return NULL;
  }
  DWORD max_length, current_length;
  uint8* destination;
  hr = buffer->Lock(&destination, &max_length, &current_length);
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to lock buffer";
    return NULL;
  }
  CHECK_EQ(static_cast<int>(current_length), 0);
  CHECK_GE(static_cast<int>(max_length), size);
  memcpy(destination, stream, size);
  CHECK(SUCCEEDED(buffer->Unlock()));
  hr = buffer->SetCurrentLength(size);
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to set current length to " << size;
    return NULL;
  }
  LOG(INFO) << __FUNCTION__ << " wrote " << size << " bytes into input sample";
  return sample.Detach();
}
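
// For illustration: at 30 fps, one frame's duration in the 100ns units above
// is 10,000,000 / 30 ~= 333,333, so a caller decoding the Nth frame might
// pass timestamp = N * 333333 and duration = 333333.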

// Public methods

MftH264Decoder::MftH264Decoder(bool use_dxva)
    : read_input_callback_(NULL),
      output_avail_callback_(NULL),
      decoder_(NULL),
      initialized_(false),
      use_dxva_(use_dxva),
      drain_message_sent_(false),
      in_buffer_size_(0),
      out_buffer_size_(0),
      frames_read_(0),
      frames_decoded_(0),
      width_(0),
      height_(0),
      stride_(0) {
}

MftH264Decoder::~MftH264Decoder() {
}

bool MftH264Decoder::Init(IDirect3DDeviceManager9* dev_manager,
                          int frame_rate_num, int frame_rate_denom,
                          int width, int height,
                          int aspect_num, int aspect_denom,
                          ReadInputCallback* read_input_cb,
                          OutputReadyCallback* output_avail_cb) {
  CHECK(read_input_cb != NULL);
  CHECK(output_avail_cb != NULL);
  if (initialized_)
    return true;
  read_input_callback_.reset(read_input_cb);
  output_avail_callback_.reset(output_avail_cb);
  if (!InitDecoder(dev_manager, frame_rate_num, frame_rate_denom,
                   width, height, aspect_num, aspect_denom))
    return false;
  if (!GetStreamsInfoAndBufferReqs())
    return false;
  if (!SendStartMessage())
    return false;
  initialized_ = true;
  return true;
}
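
// A hedged sketch of client wiring (the callback types come from
// mft_h264_decoder.h; |reader|, |renderer|, and their methods are
// hypothetical names, not part of this file):
//
//   MftH264Decoder decoder(true);  // true => decode with DXVA.
//   decoder.Init(dev_manager, 30, 1, 1280, 720, 1, 1,
//                NewCallback(reader, &BitstreamReader::OnReadInput),
//                NewCallback(renderer, &Renderer::OnOutputReady));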

bool MftH264Decoder::SendInput(uint8* data, int size, int64 timestamp,
                               int64 duration) {
  CHECK(initialized_);
  CHECK(data != NULL);
  CHECK_GT(size, 0);
  if (drain_message_sent_) {
    LOG(ERROR) << "Drain message was already sent, but trying to send more "
                  "input to decoder";
    return false;
  }
  ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateInputSample(data, size, timestamp, duration,
                                  in_buffer_size_));
  if (sample.get() == NULL) {
    LOG(ERROR) << "Failed to convert input stream to sample";
    return false;
  }
  HRESULT hr = decoder_->ProcessInput(0, sample.get(), 0);
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to ProcessInput, hr = " << std::hex << hr;
    return false;
  }
  frames_read_++;
  return true;
}

static const char* ProcessOutputStatusToCString(HRESULT hr) {
  if (hr == MF_E_TRANSFORM_STREAM_CHANGE)
    return "media stream change occurred, need to set output type";
  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
    return "decoder needs more samples";
  return "unhandled error from ProcessOutput";
}

MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
  CHECK(initialized_);

  ScopedComPtr<IMFSample> output_sample;
  if (!use_dxva_) {
    // If DXVA is enabled, the decoder will allocate the sample for us.
    output_sample.Attach(CreateEmptySampleWithBuffer(out_buffer_size_));
    if (output_sample.get() == NULL) {
      LOG(ERROR) << "GetSample: failed to create empty output sample";
      return kNoMemory;
    }
  }
  MFT_OUTPUT_DATA_BUFFER output_data_buffer;
  HRESULT hr;
  DWORD status;
  for (;;) {
    output_data_buffer.dwStreamID = 0;
    output_data_buffer.pSample = output_sample;
    output_data_buffer.dwStatus = 0;
    output_data_buffer.pEvents = NULL;
    hr = decoder_->ProcessOutput(0,  // No flags
                                 1,  // # of out streams to pull from
                                 &output_data_buffer,
                                 &status);

    // TODO(imcheng): Handle the events, if any. (No event is returned most of
    // the time.)
    IMFCollection* events = output_data_buffer.pEvents;
    if (events != NULL) {
      LOG(INFO) << "Got events from ProcessOutput, but discarding";
      events->Release();
    }
    if (FAILED(hr)) {
      LOG(INFO) << "ProcessOutput failed with status " << std::hex << hr
                << ", meaning: " << ProcessOutputStatusToCString(hr);
      if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
        if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
          LOG(ERROR) << "Failed to reset output type";
          return kResetOutputStreamFailed;
        } else {
          LOG(INFO) << "Reset output type done";
          continue;
        }
      } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
        // If we have read everything then we should have sent a drain message
        // to the MFT. If the drain message was sent but the MFT does not give
        // out any more output, then we know the decoder has processed
        // everything.
        if (drain_message_sent_) {
          LOG(INFO) << "Drain message was already sent + no output => done";
          return kNoMoreOutput;
        } else {
          if (!ReadAndProcessInput()) {
            LOG(INFO) << "Failed to read/process input. Sending drain message";
            if (!SendDrainMessage()) {
              LOG(ERROR) << "Failed to send drain message";
              return kNoMoreOutput;
            }
          }
          continue;
        }
      } else {
        return kUnspecifiedError;
      }
    } else {
      // A decoded sample was successfully obtained.
      LOG(INFO) << "Got a decoded sample from decoder";
      if (use_dxva_) {
        // If DXVA is enabled, we did not provide a sample to ProcessOutput,
        // i.e. output_sample is NULL.
        output_sample.Attach(output_data_buffer.pSample);
        if (output_sample.get() == NULL) {
          LOG(ERROR) << "Output sample using DXVA is NULL - ProcessOutput did "
                     << "not provide it!";
          return kOutputSampleError;
        }
      }
      int64 timestamp, duration;
      hr = output_sample->GetSampleTime(&timestamp);
      if (SUCCEEDED(hr))
        hr = output_sample->GetSampleDuration(&duration);
      if (FAILED(hr)) {
        LOG(ERROR) << "Failed to get sample timestamp or duration "
                   << std::hex << hr;
        return kOutputSampleError;
      }

      // The duration and timestamps are in 100-ns units, so divide by 10
      // to convert to microseconds.
      timestamp /= 10;
      duration /= 10;

      // Sanity checks for making sure there is really something in the sample.
      DWORD buf_count;
      hr = output_sample->GetBufferCount(&buf_count);
      if (FAILED(hr)) {
        LOG(ERROR) << "Failed to get buffer count, hr = " << std::hex << hr;
        return kOutputSampleError;
      }
      if (buf_count == 0) {
        LOG(ERROR) << "buf_count is 0, dropping sample";
        return kOutputSampleError;
      }
      ScopedComPtr<IMFMediaBuffer> out_buffer;
      hr = output_sample->GetBufferByIndex(0, out_buffer.Receive());
      if (FAILED(hr)) {
        LOG(ERROR) << "Failed to get decoded output buffer";
        return kOutputSampleError;
      }

      // To obtain the data, the caller should call the Lock() method instead
      // of using the data field.
      // In NV12, there are only 2 planes - the Y plane, and the interleaved UV
      // plane. Both have the same strides.
      uint8* null_data[2] = { NULL, NULL };
      int32 strides[2] = { stride_, stride_ };
      scoped_refptr<VideoFrame> decoded_frame;
      VideoFrame::CreateFrameExternal(
          use_dxva_ ? VideoFrame::TYPE_DIRECT3DSURFACE :
                      VideoFrame::TYPE_MFBUFFER,
          VideoFrame::NV12,
          width_,
          height_,
          2,
          null_data,
          strides,
          base::TimeDelta::FromMicroseconds(timestamp),
          base::TimeDelta::FromMicroseconds(duration),
          out_buffer.Detach(),
          &decoded_frame);
      CHECK(decoded_frame.get() != NULL);
      frames_decoded_++;
      output_avail_callback_->Run(decoded_frame);
      return kOutputOk;
    }
  }
}
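
// A hedged sketch of the overall pumping pattern a caller might use (the loop
// below is illustrative, not part of this class): because GetOutput() itself
// pulls input via the read callback and sends the drain message when input
// runs out, draining the whole stream reduces to:
//
//   MftH264Decoder::DecoderOutputState state;
//   do {
//     state = decoder.GetOutput();
//   } while (state == MftH264Decoder::kOutputOk);
//   // |state| now explains why decoding stopped, e.g. kNoMoreOutput.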

// Private methods

bool MftH264Decoder::InitDecoder(IDirect3DDeviceManager9* dev_manager,
                                 int frame_rate_num, int frame_rate_denom,
                                 int width, int height,
                                 int aspect_num, int aspect_denom) {
  decoder_.Attach(GetH264Decoder());
  if (!decoder_.get())
    return false;
  if (!CheckDecoderProperties())
    return false;
  if (use_dxva_) {
    if (!CheckDecoderDxvaSupport())
      return false;
    if (!SetDecoderD3d9Manager(dev_manager))
      return false;
  }
  if (!SetDecoderMediaTypes(frame_rate_num, frame_rate_denom,
                            width, height,
                            aspect_num, aspect_denom)) {
    return false;
  }
  return true;
}

bool MftH264Decoder::CheckDecoderProperties() {
  DCHECK(decoder_.get());
  DWORD in_stream_count;
  DWORD out_stream_count;
  HRESULT hr;
  hr = decoder_->GetStreamCount(&in_stream_count, &out_stream_count);
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to get stream count";
    return false;
  }
  LOG(INFO) << "Input stream count: " << in_stream_count << ", "
            << "Output stream count: " << out_stream_count;
  bool mismatch = false;
  if (in_stream_count != 1) {
    LOG(ERROR) << "Input stream count mismatch!";
    mismatch = true;
  }
  if (out_stream_count != 1) {
    LOG(ERROR) << "Output stream count mismatch!";
    mismatch = true;
  }
  return !mismatch;
}

bool MftH264Decoder::CheckDecoderDxvaSupport() {
  HRESULT hr;
  ScopedComPtr<IMFAttributes> attributes;
  hr = decoder_->GetAttributes(attributes.Receive());
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to get attributes, hr = "
               << std::hex << std::showbase << hr;
    return false;
  }
  UINT32 dxva;
  hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to get DXVA attr, hr = "
               << std::hex << std::showbase << hr
               << "; this might not be the right decoder.";
    return false;
  }
  LOG(INFO) << "Supports DXVA? " << dxva;
  if (!dxva) {
    LOG(ERROR) << "Decoder does not support DXVA - this might not be the "
               << "right decoder.";
    return false;
  }
  return true;
}

bool MftH264Decoder::SetDecoderD3d9Manager(
    IDirect3DDeviceManager9* dev_manager) {
  DCHECK(use_dxva_) << "SetDecoderD3d9Manager should only be called if DXVA is "
                    << "enabled";
  CHECK(dev_manager != NULL);
  HRESULT hr;
  hr = decoder_->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER,
                                reinterpret_cast<ULONG_PTR>(dev_manager));
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to set D3D9 device to decoder";
    return false;
  }
  return true;
}
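
// For reference, a hedged sketch of how a caller might obtain the device
// manager passed in above (|device| is an existing IDirect3DDevice9; error
// handling omitted):
//
//   UINT reset_token;
//   IDirect3DDeviceManager9* dev_manager;
//   DXVA2CreateDirect3DDeviceManager9(&reset_token, &dev_manager);
//   dev_manager->ResetDevice(device, reset_token);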

bool MftH264Decoder::SetDecoderMediaTypes(int frame_rate_num,
                                          int frame_rate_denom,
                                          int width, int height,
                                          int aspect_num, int aspect_denom) {
  DCHECK(decoder_.get());
  if (!SetDecoderInputMediaType(frame_rate_num, frame_rate_denom,
                                width, height,
                                aspect_num, aspect_denom))
    return false;
  if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
    return false;
  }
  return true;
}

bool MftH264Decoder::SetDecoderInputMediaType(int frame_rate_num,
                                              int frame_rate_denom,
                                              int width, int height,
                                              int aspect_num,
                                              int aspect_denom) {
  ScopedComPtr<IMFMediaType> media_type;
  HRESULT hr;
  hr = MFCreateMediaType(media_type.Receive());
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to create empty media type object";
    return false;
  }
  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  if (FAILED(hr)) {
    LOG(ERROR) << "SetGUID for major type failed";
    return false;
  }
  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  if (FAILED(hr)) {
    LOG(ERROR) << "SetGUID for subtype failed";
    return false;
  }

  // Provide additional info to the decoder to avoid a format change during
  // streaming. Only set the attributes whose values are actually known.
  if (frame_rate_num > 0 && frame_rate_denom > 0) {
    hr = MFSetAttributeRatio(media_type.get(), MF_MT_FRAME_RATE,
                             frame_rate_num, frame_rate_denom);
    if (FAILED(hr)) {
      LOG(ERROR) << "Failed to set frame rate";
      return false;
    }
  }
  if (width > 0 && height > 0) {
    hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height);
    if (FAILED(hr)) {
      LOG(ERROR) << "Failed to set frame size";
      return false;
    }
  }

  // TODO(imcheng): Not sure about this, but this is the value recommended by
  // MSDN.
  hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                             MFVideoInterlace_MixedInterlaceOrProgressive);
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to set interlace mode";
    return false;
  }
  if (aspect_num > 0 && aspect_denom > 0) {
    hr = MFSetAttributeRatio(media_type.get(), MF_MT_PIXEL_ASPECT_RATIO,
                             aspect_num, aspect_denom);
    if (FAILED(hr)) {
      LOG(ERROR) << "Failed to set aspect ratio";
      return false;
    }
  }
  hr = decoder_->SetInputType(0, media_type.get(), 0);  // No flags
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to set decoder's input type";
    return false;
  }
  return true;
}

bool MftH264Decoder::SetDecoderOutputMediaType(const GUID subtype) {
  DWORD i = 0;
  IMFMediaType* out_media_type;
  bool found = false;
  while (SUCCEEDED(decoder_->GetOutputAvailableType(0, i, &out_media_type))) {
    GUID out_subtype;
    HRESULT hr;
    hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    if (FAILED(hr)) {
      LOG(ERROR) << "Failed to GetGUID() on GetOutputAvailableType() " << i;
      out_media_type->Release();
      i++;  // Advance so we do not retry the same type forever.
      continue;
    }
    if (out_subtype == subtype) {
      LOG(INFO) << "|subtype| is at index "
                << i << " in GetOutputAvailableType()";
      hr = decoder_->SetOutputType(0, out_media_type, 0);  // No flags
      if (SUCCEEDED(hr)) {
        hr = MFGetAttributeSize(out_media_type, MF_MT_FRAME_SIZE,
                                reinterpret_cast<UINT32*>(&width_),
                                reinterpret_cast<UINT32*>(&height_));
      }
      if (SUCCEEDED(hr)) {
        hr = MFGetStrideForBitmapInfoHeader(MFVideoFormat_NV12.Data1,
                                            width_,
                                            reinterpret_cast<LONG*>(&stride_));
      }
      if (FAILED(hr)) {
        LOG(ERROR) << "Failed to SetOutputType to |subtype| or obtain "
                   << "width/height/stride " << std::hex << hr;
      } else {
        found = true;
        out_media_type->Release();
        break;
      }
    }
    i++;
    out_media_type->Release();
  }
  if (!found) {
    LOG(ERROR) << "|subtype| was not found in GetOutputAvailableType()";
    return false;
  }
  return true;
}

bool MftH264Decoder::SendStartMessage() {
  HRESULT hr;
  hr = decoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL);
  if (FAILED(hr)) {
    LOG(ERROR) << "Process start message failed, hr = "
               << std::hex << std::showbase << hr;
    return false;
  }
  LOG(INFO) << "Sent a message to decoder to indicate start of stream";
  return true;
}

// Prints out info about the input/output streams, and gets the minimum buffer
// sizes for input and output samples.
// The MFT will not allocate buffers for either input or output, so we have
// to do it ourselves and make sure they are the correct size.
// The exception is when DXVA is enabled: the decoder allocates its own output
// samples.
bool MftH264Decoder::GetStreamsInfoAndBufferReqs() {
  DCHECK(decoder_.get());
  HRESULT hr;
  MFT_INPUT_STREAM_INFO input_stream_info;
  hr = decoder_->GetInputStreamInfo(0, &input_stream_info);
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to get input stream info";
    return false;
  }
  LOG(INFO) << "Input stream info: ";
  LOG(INFO) << "Max latency: " << input_stream_info.hnsMaxLatency;

  // There should be three flags set: MFT_INPUT_STREAM_WHOLE_SAMPLES (a whole
  // frame must be in a single sample),
  // MFT_INPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER (each sample contains exactly
  // one buffer), and MFT_INPUT_STREAM_FIXED_SAMPLE_SIZE (samples have a fixed
  // size, as in cbSize). Together these equal 0x7.
  LOG(INFO) << "Flags: "
            << std::hex << std::showbase << input_stream_info.dwFlags;
  CHECK_EQ(static_cast<int>(input_stream_info.dwFlags), 0x7);
  LOG(INFO) << "Min buffer size: " << input_stream_info.cbSize;
  LOG(INFO) << "Max lookahead: " << input_stream_info.cbMaxLookahead;
  LOG(INFO) << "Alignment: " << input_stream_info.cbAlignment;
  if (input_stream_info.cbAlignment > 0) {
    LOG(WARNING) << "Decoder requires input to be aligned";
  }
  in_buffer_size_ = input_stream_info.cbSize;

  MFT_OUTPUT_STREAM_INFO output_stream_info;
  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info);
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to get output stream info";
    return false;
  }
  LOG(INFO) << "Output stream info: ";

  // The flags here should be the same and mean the same thing, except that
  // when DXVA is enabled, there is an extra flag,
  // MFT_OUTPUT_STREAM_PROVIDES_SAMPLES (0x100), meaning that the decoder will
  // allocate its own samples.
  LOG(INFO) << "Flags: "
            << std::hex << std::showbase << output_stream_info.dwFlags;
  CHECK_EQ(static_cast<int>(output_stream_info.dwFlags),
           use_dxva_ ? 0x107 : 0x7);
  LOG(INFO) << "Min buffer size: " << output_stream_info.cbSize;
  LOG(INFO) << "Alignment: " << output_stream_info.cbAlignment;
  if (output_stream_info.cbAlignment > 0) {
    LOG(WARNING) << "Decoder requires output to be aligned";
  }
  out_buffer_size_ = output_stream_info.cbSize;

  return true;
}

bool MftH264Decoder::ReadAndProcessInput() {
  uint8* input_stream_dummy;
  int size;
  int64 duration;
  int64 timestamp;
  read_input_callback_->Run(&input_stream_dummy, &size, &timestamp, &duration);
  scoped_array<uint8> input_stream(input_stream_dummy);
  if (input_stream.get() == NULL) {
    LOG(INFO) << "No more input";
    return false;
  } else {
    // We read an input stream; we can feed it into the decoder.
    return SendInput(input_stream.get(), size, timestamp, duration);
  }
}

bool MftH264Decoder::SendDrainMessage() {
  CHECK(initialized_);
  if (drain_message_sent_) {
    LOG(ERROR) << "Drain message was already sent before!";
    return false;
  }

  // Send the drain message with no parameters.
  HRESULT hr = decoder_->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, NULL);
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to send the drain message to decoder";
    return false;
  }
  drain_message_sent_ = true;
  return true;
}

}  // namespace media