Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(380)

Side by Side Diff: content/common/gpu/media/dxva_video_decode_accelerator.cc

Issue 8510039: Initial implementation of the DXVA 2.0 H.264 hardware decoder for pepper for Windows. The decodin... (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src/
Patch Set: '' Created 8 years, 12 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
(Empty)
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h"
6
7 #if !defined(OS_WIN)
8 #error This file should only be built on Windows.
9 #endif // !defined(OS_WIN)
10
11 #include <ks.h>
12 #include <codecapi.h>
13 #include <d3dx9tex.h>
14 #include <mfapi.h>
15 #include <mferror.h>
16 #include <wmcodecdsp.h>
17
18 #include "base/bind.h"
19 #include "base/callback.h"
20 #include "base/debug/trace_event.h"
21 #include "base/logging.h"
22 #include "base/memory/scoped_handle.h"
23 #include "base/memory/scoped_ptr.h"
24 #include "base/message_loop.h"
25 #include "base/process_util.h"
26 #include "base/shared_memory.h"
27 #include "media/video/video_decode_accelerator.h"
28 #include "third_party/angle/include/GLES2/gl2.h"
29 #include "third_party/angle/include/GLES2/gl2ext.h"
30
// We only request 5 picture buffers from the client which are used to hold the
// decoded samples. These buffers are then reused when the client tells us that
// it is done with the buffer.
static const int kNumPictureBuffers = 5;

// Set to true once LoadDecodingDlls() has loaded every DLL the decoder needs.
// Initialize() refuses to proceed while this is false.
bool DXVAVideoDecodeAccelerator::loaded_decoder_dlls_ = false;

// Logs |log| and returns |ret| from the current function if |result| is
// false. Used for failures that need not be reported to the client.
#define RETURN_ON_FAILURE(result, log, ret)  \
  do {                                       \
    if (!(result)) {                         \
      DLOG(ERROR) << log;                    \
      return ret;                            \
    }                                        \
  } while (0)

// Same as RETURN_ON_FAILURE but takes an HRESULT |result| and appends the
// failing HRESULT (in hex) to the log message.
#define RETURN_ON_HR_FAILURE(result, log, ret)         \
  RETURN_ON_FAILURE(SUCCEEDED(result),                 \
                    log << ", HRESULT: 0x" << std::hex << result, \
                    ret);

// Logs |log|, notifies the client of |error_code| via StopOnError() (which
// also tears the decoder down), and returns |ret|. The trailing comma in
// callers supplies an empty |ret| for void functions.
#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret)  \
  do {                                                              \
    if (!(result)) {                                                \
      DVLOG(1) << log;                                              \
      StopOnError(error_code);                                      \
      return ret;                                                   \
    }                                                               \
  } while (0)

// HRESULT flavor of RETURN_AND_NOTIFY_ON_FAILURE.
#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret)  \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result),                      \
                               log << ", HRESULT: 0x" << std::hex << result, \
                               error_code, ret);
65 static IMFSample* CreateEmptySample() {
66 base::win::ScopedComPtr<IMFSample> sample;
67 HRESULT hr = MFCreateSample(sample.Receive());
68 RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL);
69 return sample.Detach();
70 }
71
72 // Creates a Media Foundation sample with one buffer of length |buffer_length|
73 // on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0.
74 static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
75 CHECK_GT(buffer_length, 0);
76
77 base::win::ScopedComPtr<IMFSample> sample;
78 sample.Attach(CreateEmptySample());
79
80 base::win::ScopedComPtr<IMFMediaBuffer> buffer;
81 HRESULT hr = E_FAIL;
82 if (align == 0) {
83 // Note that MFCreateMemoryBuffer is same as MFCreateAlignedMemoryBuffer
84 // with the align argument being 0.
85 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
86 } else {
87 hr = MFCreateAlignedMemoryBuffer(buffer_length,
88 align - 1,
89 buffer.Receive());
90 }
91 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL);
92
93 hr = sample->AddBuffer(buffer);
94 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL);
95
96 return sample.Detach();
97 }
98
// Creates a Media Foundation sample with one buffer containing a copy of the
// given Annex B stream data.
// If duration and sample time are not known, provide 0.
// |min_size| specifies the minimum size of the buffer (might be required by
// the decoder for input). If no alignment is required, provide 0.
static IMFSample* CreateInputSample(const uint8* stream, int size,
                                    int min_size, int alignment) {
  CHECK(stream);
  CHECK_GT(size, 0);
  base::win::ScopedComPtr<IMFSample> sample;
  // Allocate at least |min_size| bytes even if the stream is smaller, since
  // the decoder may require a minimum input buffer size.
  sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
                                            alignment));
  RETURN_ON_FAILURE(sample, "Failed to create empty sample", NULL);

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL);

  DWORD max_length = 0;
  DWORD current_length = 0;
  uint8* destination = NULL;
  hr = buffer->Lock(&destination, &max_length, &current_length);
  RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL);

  // A freshly created buffer must be empty and large enough for the copy.
  CHECK_EQ(current_length, 0u);
  CHECK_GE(static_cast<int>(max_length), size);
  memcpy(destination, stream, size);

  hr = buffer->Unlock();
  RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL);

  hr = buffer->SetCurrentLength(size);
  RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL);

  // Flag the sample as a clean point so the decoder can start decoding from
  // it. NOTE(review): this marks every input as a key frame; confirm the
  // caller only feeds data for which that is safe.
  hr = sample->SetUINT32(MFSampleExtension_CleanPoint, TRUE);
  RETURN_ON_HR_FAILURE(hr, "Failed to mark sample as key frame", NULL);

  return sample.Detach();
}
138
// Maps the renderer-side shared memory referenced by |bitstream_buffer| into
// this process and returns a new IMFSample wrapping a copy of its contents,
// or NULL on failure. |renderer_process| owns the shared memory handle;
// |stream_size| and |alignment| are the decoder's minimum input buffer size
// and required alignment.
static IMFSample* CreateSampleFromInputBuffer(
    const media::BitstreamBuffer& bitstream_buffer,
    base::ProcessHandle renderer_process,
    DWORD stream_size,
    DWORD alignment) {
  // The handle in |bitstream_buffer| belongs to the renderer process, so it
  // must be duplicated into this process before it can be mapped.
  HANDLE shared_memory_handle = NULL;
  RETURN_ON_FAILURE(::DuplicateHandle(renderer_process,
                                      bitstream_buffer.handle(),
                                      base::GetCurrentProcessHandle(),
                                      &shared_memory_handle,
                                      0,
                                      FALSE,
                                      DUPLICATE_SAME_ACCESS),
                    "Duplicate handle failed", NULL);

  // |shm| presumably takes ownership of the duplicated handle (read-only) and
  // closes it on destruction -- verify against base::SharedMemory. The sample
  // below owns its own copy of the data, so unmapping here is safe.
  base::SharedMemory shm(shared_memory_handle, true);
  RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()),
                    "Failed in base::SharedMemory::Map", NULL);

  return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()),
                           bitstream_buffer.size(),
                           stream_size,
                           alignment);
}
163
// Wraps a client-provided picture buffer; newly registered buffers start out
// available for output.
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
    const media::PictureBuffer& buffer)
    : available(true),
      picture_buffer(buffer) {
}
169
// Associates a decoded D3D surface with the id of the input bitstream buffer
// it was decoded from, so the pair can be handed to the client later.
DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
    int32 buffer_id, IDirect3DSurface9* surface)
    : input_buffer_id(buffer_id),
      dest_surface(surface) {
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}
177
178 // static
179 void DXVAVideoDecodeAccelerator::LoadDecodingDlls() {
180 if (loaded_decoder_dlls_)
Ami GONE FROM CHROMIUM 2011/12/21 18:04:56 DCHECK(!loaded_decoder_dlls_) instead to enforce t
ananta 2011/12/21 18:39:38 Done.
181 return;
182
183 static wchar_t* decoding_dlls[] = {
184 L"d3d9.dll",
185 L"d3dx9_43.dll",
186 L"dxva2.dll",
187 L"mf.dll",
188 L"mfplat.dll",
189 L"msmpeg2vdec.dll",
190 };
191
192 for (int i = 0; i < arraysize(decoding_dlls); ++i) {
193 if (!::LoadLibrary(decoding_dlls[i])) {
194 DLOG(ERROR) << "Failed to load decoder dll: " << decoding_dlls[i]
195 << ", Error: " << ::GetLastError();
196 return;
197 }
198 }
199 loaded_decoder_dlls_ = true;
200 }
201
// |client| receives all decode notifications; |renderer_process| is needed to
// duplicate shared-memory handles for input bitstream buffers.
DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
    media::VideoDecodeAccelerator::Client* client,
    base::ProcessHandle renderer_process)
    : client_(client),
      state_(kUninitialized),
      pictures_requested_(false),
      renderer_process_(renderer_process),
      dev_manager_reset_token_(0),
      last_input_buffer_id_(-1),
      inputs_before_decode_(0) {
}
213
DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
  // Drop the client pointer so no notification can be delivered after
  // destruction. COM members release themselves via their scoped wrappers.
  client_ = NULL;
}
217
// Brings up Media Foundation, the D3D device manager and the H.264 decoder
// MFT, in that order. On success transitions to kNormal and posts
// NotifyInitializeDone to the client; on any failure notifies the client via
// the RETURN_AND_NOTIFY macros and returns false.
bool DXVAVideoDecodeAccelerator::Initialize(Profile) {
  DCHECK(CalledOnValidThread());

  // LoadDecodingDlls() must have run (pre-sandbox) or nothing below works.
  RETURN_AND_NOTIFY_ON_FAILURE(loaded_decoder_dlls_,
      "Decoder dlls not loaded", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kUninitialized),
      "Initialize: invalid state: " << state_, ILLEGAL_STATE, false);

  HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE,
                                  false);

  // The device manager must exist before InitDecoder() hands it to the MFT.
  RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
      "Failed to create device manager", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(),
      "Failed to initialize decoder", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
      "Failed to get input/output stream info.", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
      "Failed to start decoder", PLATFORM_FAILURE, false);

  state_ = kNormal;
  // Notify asynchronously so the client sees the callback after Initialize()
  // returns.
  MessageLoop::current()->PostTask(FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyInitializeDone, this));
  return true;
}
249
250 void DXVAVideoDecodeAccelerator::Decode(
251 const media::BitstreamBuffer& bitstream_buffer) {
252 DCHECK(CalledOnValidThread());
253
254 RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
255 "Invalid state: " << state_, ILLEGAL_STATE,);
256
257 base::win::ScopedComPtr<IMFSample> sample;
258 sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer,
259 renderer_process_,
260 input_stream_info_.cbSize,
261 input_stream_info_.cbAlignment));
262 RETURN_AND_NOTIFY_ON_FAILURE(sample, "Failed to create input sample",
263 PLATFORM_FAILURE,);
264 if (!inputs_before_decode_) {
265 TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
266 }
267 inputs_before_decode_++;
268
269 RETURN_AND_NOTIFY_ON_FAILURE(
270 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
271 "Failed to create input sample", PLATFORM_FAILURE,);
272
273 HRESULT hr = decoder_->ProcessInput(0, sample, 0);
274 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample",
275 PLATFORM_FAILURE,);
276
277 RETURN_AND_NOTIFY_ON_FAILURE(
278 SendMFTMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0),
279 "Failed to send eos message to MFT", PLATFORM_FAILURE,);
280 state_ = kEosDrain;
281
282 last_input_buffer_id_ = bitstream_buffer.id();
283
284 DoDecode();
285
286 RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
287 "Failed to process output. Unexpected decoder state: " << state_,
288 ILLEGAL_STATE,);
289
290 // The Microsoft Media foundation decoder internally buffers up to 30 frames
291 // before returning a decoded frame. We need to inform the client that this
292 // input buffer is processed as it may stop sending us further input.
293 // Note: This may break clients which expect every input buffer to be
294 // associated with a decoded output buffer.
295 // TODO(ananta)
296 // Do some more investigation into whether it is possible to get the MFT
297 // decoder to emit an output packet for every input packet.
298 // http://code.google.com/p/chromium/issues/detail?id=108121
299 MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
300 &DXVAVideoDecodeAccelerator::NotifyInputBufferRead, this,
301 bitstream_buffer.id()));
302 }
303
304 void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
305 const std::vector<media::PictureBuffer>& buffers) {
306 DCHECK(CalledOnValidThread());
307 // Copy the picture buffers provided by the client to the available list,
308 // and mark these buffers as available for use.
309 for (size_t buffer_index = 0; buffer_index < buffers.size();
310 ++buffer_index) {
311 bool inserted = output_picture_buffers_.insert(std::make_pair(
312 buffers[buffer_index].id(),
313 DXVAPictureBuffer(buffers[buffer_index]))).second;
314 DCHECK(inserted);
315 }
316 ProcessPendingSamples();
317 }
318
319 void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
320 int32 picture_buffer_id) {
321 DCHECK(CalledOnValidThread());
322
323 OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
324 RETURN_AND_NOTIFY_ON_FAILURE(it != output_picture_buffers_.end(),
325 "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);
326
327 it->second.available = true;
328 ProcessPendingSamples();
329 }
330
// Drains all buffered output from the decoder, then notifies the client.
void DXVAVideoDecodeAccelerator::Flush() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Unexpected decoder state: " << state_, ILLEGAL_STATE,);

  state_ = kEosDrain;

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0),
      "Failed to send drain message", PLATFORM_FAILURE,);

  // As per MSDN docs after the client sends this message, it calls
  // IMFTransform::ProcessOutput in a loop, until ProcessOutput returns the
  // error code MF_E_TRANSFORM_NEED_MORE_INPUT. The DoDecode function sets
  // the state to kStopped when the decoder returns
  // MF_E_TRANSFORM_NEED_MORE_INPUT.
  // The MFT decoder can buffer upto 30 frames worth of input before returning
  // an output frame. This loop here attempts to retrieve as many output frames
  // as possible from the buffered set.
  // NOTE(review): if DoDecode() hits an error it calls StopOnError(), which
  // resets state_ to kUninitialized -- confirm that exits this loop rather
  // than spinning.
  while (state_ != kStopped) {
    DoDecode();
  }

  // Posted task runs after this function returns, so restoring kNormal below
  // happens before the client observes the flush-done callback.
  MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyFlushDone, this));

  state_ = kNormal;
}
361
// Discards any buffered decoder state and notifies the client asynchronously.
// Unlike Flush(), pending output is dropped rather than drained.
void DXVAVideoDecodeAccelerator::Reset() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Reset: invalid state: " << state_, ILLEGAL_STATE,);

  state_ = kResetting;

  // MFT_MESSAGE_COMMAND_FLUSH discards all pending input/output in the MFT.
  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
      "Reset: Failed to send message.", PLATFORM_FAILURE,);

  MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyResetDone, this));

  state_ = DXVAVideoDecodeAccelerator::kNormal;
}
380
// Tears down the decoder; Invalidate() releases all COM objects and calls
// MFShutdown().
void DXVAVideoDecodeAccelerator::Destroy() {
  DCHECK(CalledOnValidThread());
  Invalidate();
}
385
// Creates the D3D9Ex device and the DXVA device manager that the decoder MFT
// uses for hardware acceleration. Returns false on any failure.
bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
  base::win::ScopedComPtr<IDirect3D9Ex> d3d9;

  HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9.Receive());
  RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);

  // The device never presents to screen; decoded frames are copied out via
  // offscreen surfaces. Hence a minimal 1x1 windowed swap chain suffices,
  // parented to the shell window since we have no window of our own.
  D3DPRESENT_PARAMETERS present_params = {0};
  present_params.BackBufferWidth = 1;
  present_params.BackBufferHeight = 1;
  present_params.BackBufferFormat = D3DFMT_UNKNOWN;
  present_params.BackBufferCount = 1;
  present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
  present_params.hDeviceWindow = GetShellWindow();
  present_params.Windowed = TRUE;
  present_params.Flags = D3DPRESENTFLAG_VIDEO;
  present_params.FullScreen_RefreshRateInHz = 0;
  present_params.PresentationInterval = 0;

  hr = d3d9->CreateDeviceEx(D3DADAPTER_DEFAULT,
                            D3DDEVTYPE_HAL,
                            GetShellWindow(),
                            D3DCREATE_SOFTWARE_VERTEXPROCESSING,
                            &present_params,
                            NULL,
                            device_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);

  hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
                                         device_manager_.Receive());
  RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);

  // Associate the device with the manager; the reset token authenticates
  // later ResetDevice calls.
  hr = device_manager_->ResetDevice(device_, dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);
  return true;
}
421
// Instantiates the Microsoft H.264 decoder MFT and configures it for DXVA by
// handing it our D3D device manager, then sets the media types.
bool DXVAVideoDecodeAccelerator::InitDecoder() {
  // We cannot use CoCreateInstance to instantiate the decoder object as that
  // fails in the sandbox. We mimic the steps CoCreateInstance uses to
  // instantiate the object.
  // The DLL must already be loaded (see LoadDecodingDlls, run pre-sandbox).
  HMODULE decoder_dll = ::GetModuleHandle(L"msmpeg2vdec.dll");
  RETURN_ON_FAILURE(decoder_dll,
                    "msmpeg2vdec.dll required for decoding is not loaded",
                    false);

  typedef HRESULT (WINAPI* GetClassObject)(const CLSID& clsid,
                                           const IID& iid,
                                           void** object);

  GetClassObject get_class_object = reinterpret_cast<GetClassObject>(
      GetProcAddress(decoder_dll, "DllGetClassObject"));
  RETURN_ON_FAILURE(get_class_object,
                    "Failed to get DllGetClassObject pointer", false);

  base::win::ScopedComPtr<IClassFactory> factory;
  HRESULT hr = get_class_object(__uuidof(CMSH264DecoderMFT),
                                __uuidof(IClassFactory),
                                reinterpret_cast<void**>(factory.Receive()));
  RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false);

  hr = factory->CreateInstance(NULL, __uuidof(IMFTransform),
                               reinterpret_cast<void**>(decoder_.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);

  RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
                    "Failed to check decoder DXVA support", false);

  // Hand the MFT our device manager so it decodes into D3D surfaces.
  hr = decoder_->ProcessMessage(
      MFT_MESSAGE_SET_D3D_MANAGER,
      reinterpret_cast<ULONG_PTR>(device_manager_.get()));
  RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false);

  return SetDecoderMediaTypes();
}
460
461 bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
462 base::win::ScopedComPtr<IMFAttributes> attributes;
463 HRESULT hr = decoder_->GetAttributes(attributes.Receive());
464 RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false);
465
466 UINT32 dxva = 0;
467 hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
468 RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);
469
470 hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
471 RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
472 return true;
473 }
474
// Configures the MFT's input (H.264) and output (NV12) media types.
bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
  RETURN_ON_FAILURE(SetDecoderInputMediaType(),
                    "Failed to set decoder input media type", false);
  return SetDecoderOutputMediaType(MFVideoFormat_NV12);
}
480
// Tells the MFT to expect H.264 video on its single input stream.
bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
  base::win::ScopedComPtr<IMFMediaType> media_type;
  HRESULT hr = MFCreateMediaType(media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false);

  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);

  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false);

  // Frame size/rate are not set here; the decoder derives them from the
  // bitstream itself.
  hr = decoder_->SetInputType(0, media_type, 0);  // No flags
  RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false);
  return true;
}
496
497 bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType(
498 const GUID& subtype) {
499 base::win::ScopedComPtr<IMFMediaType> out_media_type;
500
501 for (uint32 i = 0;
502 SUCCEEDED(decoder_->GetOutputAvailableType(0, i,
503 out_media_type.Receive()));
504 ++i) {
505 GUID out_subtype = {0};
506 HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
507 RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false);
508
509 if (out_subtype == subtype) {
510 hr = decoder_->SetOutputType(0, out_media_type, 0); // No flags
511 RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false);
512 return true;
513 }
514 out_media_type.Release();
515 }
516 return false;
517 }
518
519 bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg,
520 int32 param) {
521 HRESULT hr = decoder_->ProcessMessage(msg, param);
522 return SUCCEEDED(hr);
523 }
524
// Gets the minimum buffer sizes for input and output samples. The MFT will not
// allocate buffer for input nor output, so we have to do it ourselves and make
// sure they're the correct size. We only provide decoding if DXVA is enabled.
bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
  HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);

  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);

  DVLOG(1) << "Input stream info: ";
  DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
  // There should be three flags, one for requiring a whole frame be in a
  // single sample, one for requiring there be one buffer only in a single
  // sample, and one that specifies a fixed sample size. (as in cbSize)
  // NOTE(review): these CHECKs hard-code the exact flag values this specific
  // MFT reports today; a driver/OS update changing them would crash the GPU
  // process rather than fail gracefully.
  CHECK_EQ(input_stream_info_.dwFlags, 0x7u);

  DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
  DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
  DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;

  DVLOG(1) << "Output stream info: ";
  // The flags here should be the same and mean the same thing, except when
  // DXVA is enabled, there is an extra 0x100 flag meaning decoder will
  // allocate its own sample.
  DVLOG(1) << "Flags: "
           << std::hex << std::showbase << output_stream_info_.dwFlags;
  CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
  DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
  DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
  return true;
}
557
558 void DXVAVideoDecodeAccelerator::DoDecode() {
559 // This function is also called from Flush in a loop which could result
560 // in the state transitioning to kNormal due to decoded output.
561 RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kEosDrain),
562 "DoDecode: not in normal/drain state", ILLEGAL_STATE,);
563
564 MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
565 DWORD status = 0;
566
567 HRESULT hr = decoder_->ProcessOutput(0, // No flags
568 1, // # of out streams to pull from
569 &output_data_buffer,
570 &status);
571 IMFCollection* events = output_data_buffer.pEvents;
572 if (events != NULL) {
573 VLOG(1) << "Got events from ProcessOuput, but discarding";
574 events->Release();
575 }
576 if (FAILED(hr)) {
577 // A stream change needs further ProcessInput calls to get back decoder
578 // output which is why we need to set the state to stopped.
579 if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
580 if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
581 // Decoder didn't let us set NV12 output format. Not sure as to why
582 // this can happen. Give up in disgust.
583 NOTREACHED() << "Failed to set decoder output media type to NV12";
584 state_ = kStopped;
585 } else {
586 DVLOG(1) << "Received output format change from the decoder."
587 " Recursively invoking DoDecode";
588 DoDecode();
589 }
590 return;
591 } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
592 // No more output from the decoder. Stop playback.
593 state_ = kStopped;
594 return;
595 } else {
596 NOTREACHED() << "Unhandled error in DoDecode()";
597 return;
598 }
599 }
600 TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
601
602 TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
603 inputs_before_decode_);
604
605 inputs_before_decode_ = 0;
606
607 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
608 "Failed to process output sample.", PLATFORM_FAILURE,);
609
610 state_ = kNormal;
611 }
612
// Takes ownership of |sample| (a decoded frame in a D3D YUV surface), copies
// it into a new RGB offscreen surface, and queues that surface for delivery
// to the client. Requests picture buffers from the client on first use.
bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
  RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);

  // Attach (not QI) so the reference handed to us by ProcessOutput is
  // released exactly once when |output_sample| goes out of scope.
  base::win::ScopedComPtr<IMFSample> output_sample;
  output_sample.Attach(sample);

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);

  base::win::ScopedComPtr<IDirect3DSurface9> surface;
  hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                    IID_PPV_ARGS(surface.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
                       false);

  D3DSURFACE_DESC surface_desc;
  hr = surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.SurfaceCreation", this,
                        "");
  // TODO(ananta)
  // The code below may not be necessary once we have an ANGLE extension which
  // allows us to pass the Direct 3D surface directly for rendering.

  // The decoded bits in the source direct 3d surface are in the YUV
  // format. Angle does not support that. As a workaround we create an
  // offscreen surface in the RGB format and copy the source surface
  // to this surface.
  base::win::ScopedComPtr<IDirect3DSurface9> dest_surface;
  hr = device_->CreateOffscreenPlainSurface(surface_desc.Width,
                                            surface_desc.Height,
                                            D3DFMT_A8R8G8B8,
                                            D3DPOOL_DEFAULT,
                                            dest_surface.Receive(),
                                            NULL);
  RETURN_ON_HR_FAILURE(hr, "Failed to create offscreen surface", false);

  // D3DX performs the YUV -> RGB conversion during the copy.
  hr = D3DXLoadSurfaceFromSurface(dest_surface, NULL, NULL, surface, NULL,
                                  NULL, D3DX_DEFAULT, 0);
  RETURN_ON_HR_FAILURE(hr, "D3DXLoadSurfaceFromSurface failed", false);

  TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.SurfaceCreation", this, "");

  // Queue the converted frame together with the id of the input buffer it
  // came from; delivery happens when a picture buffer is available.
  pending_output_samples_.push_back(
      PendingSampleInfo(last_input_buffer_id_, dest_surface));

  // If we have available picture buffers to copy the output data then use the
  // first one and then flag it as not being available for use.
  if (output_picture_buffers_.size()) {
    ProcessPendingSamples();
    return true;
  }
  if (pictures_requested_) {
    DVLOG(1) << "Waiting for picture slots from the client.";
    return true;
  }
  // Go ahead and request picture buffers.
  MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::RequestPictureBuffers,
      this, surface_desc.Width, surface_desc.Height));

  pictures_requested_ = true;
  return true;
}
679
// Reads back |dest_surface| (RGB frame decoded from |input_buffer_id|) into
// system memory and uploads it to the GL texture backing |picture_buffer|,
// then posts a PictureReady notification to the client.
bool DXVAVideoDecodeAccelerator::CopyOutputSampleDataToPictureBuffer(
    IDirect3DSurface9* dest_surface, media::PictureBuffer picture_buffer,
    int input_buffer_id) {
  DCHECK(dest_surface);

  D3DSURFACE_DESC surface_desc;
  HRESULT hr = dest_surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  // GetBitmapFromSurface does a GPU -> CPU readback via GDI; |bits| holds the
  // BGRA pixels (bottom-up DIB, which matches OpenGL's row order).
  scoped_array<char> bits;
  RETURN_ON_FAILURE(GetBitmapFromSurface(dest_surface, &bits),
                    "Failed to get bitmap from surface for rendering", false);

  // This function currently executes in the context of IPC handlers in the
  // GPU process which ensures that there is always a OpenGL context.
  // Save and restore the current texture binding so we do not clobber GL
  // state the surrounding code depends on.
  GLint current_texture = 0;
  glGetIntegerv(GL_TEXTURE_BINDING_2D, &current_texture);

  glBindTexture(GL_TEXTURE_2D, picture_buffer.texture_id());
  glTexImage2D(GL_TEXTURE_2D, 0, GL_BGRA_EXT, surface_desc.Width,
               surface_desc.Height, 0, GL_BGRA_EXT, GL_UNSIGNED_BYTE,
               reinterpret_cast<GLvoid*>(bits.get()));
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

  glBindTexture(GL_TEXTURE_2D, current_texture);

  media::Picture output_picture(picture_buffer.id(), input_buffer_id);
  MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyPictureReady, this, output_picture));
  return true;
}
711
// Matches queued output samples with available picture buffers, copying each
// sample into a buffer (oldest first) and marking the buffer in-use.
void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
  if (pending_output_samples_.empty())
    return;

  OutputBuffers::iterator index;

  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end() &&
       !pending_output_samples_.empty();
       ++index) {
    if (index->second.available) {
      PendingSampleInfo sample_info = pending_output_samples_.front();

      // NOTE(review): the return value of the copy is ignored; on failure the
      // buffer is still marked unavailable and the sample dropped. Confirm
      // this is intentional.
      CopyOutputSampleDataToPictureBuffer(sample_info.dest_surface,
                                          index->second.picture_buffer,
                                          sample_info.input_buffer_id);
      index->second.available = false;
      pending_output_samples_.pop_front();
    }
  }
}
733
734 void DXVAVideoDecodeAccelerator::ClearState() {
735 last_input_buffer_id_ = -1;
736 output_picture_buffers_.clear();
737 pending_output_samples_.clear();
738 }
739
// Reports |error| to the client (once -- client_ is cleared afterwards) and
// tears the decoder down if it was initialized.
void DXVAVideoDecodeAccelerator::StopOnError(
    media::VideoDecodeAccelerator::Error error) {
  DCHECK(CalledOnValidThread());

  if (client_)
    client_->NotifyError(error);
  // Prevent any further notifications after the error.
  client_ = NULL;

  if (state_ != kUninitialized) {
    Invalidate();
  }
}
752
// Reads the pixels of |surface| back to system memory via GDI and stores them
// in |bits| as a bottom-up DIB (which matches OpenGL's row order). Returns
// false on failure. The surface's DC is always released before returning.
bool DXVAVideoDecodeAccelerator::GetBitmapFromSurface(
    IDirect3DSurface9* surface,
    scoped_array<char>* bits) {
  // Get the currently loaded bitmap from the DC.
  HDC hdc = NULL;
  HRESULT hr = surface->GetDC(&hdc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get HDC from surface", false);

  HBITMAP bitmap =
      reinterpret_cast<HBITMAP>(GetCurrentObject(hdc, OBJ_BITMAP));
  if (!bitmap) {
    NOTREACHED() << "Failed to get bitmap from DC";
    surface->ReleaseDC(hdc);
    return false;
  }
  // TODO(ananta)
  // The code below may not be necessary once we have an ANGLE extension which
  // allows us to pass the Direct 3D surface directly for rendering.
  // The Device dependent bitmap is upside down for OpenGL. We convert the
  // bitmap to a DIB and render it on the texture instead.
  BITMAP bitmap_basic_info = {0};
  if (!GetObject(bitmap, sizeof(BITMAP), &bitmap_basic_info)) {
    NOTREACHED() << "Failed to read bitmap info";
    surface->ReleaseDC(hdc);
    return false;
  }
  BITMAPINFO bitmap_info = {0};
  bitmap_info.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
  bitmap_info.bmiHeader.biWidth = bitmap_basic_info.bmWidth;
  // Positive biHeight requests a bottom-up DIB from GetDIBits.
  bitmap_info.bmiHeader.biHeight = bitmap_basic_info.bmHeight;
  bitmap_info.bmiHeader.biPlanes = 1;
  bitmap_info.bmiHeader.biBitCount = bitmap_basic_info.bmBitsPixel;
  bitmap_info.bmiHeader.biCompression = BI_RGB;
  bitmap_info.bmiHeader.biSizeImage = 0;
  bitmap_info.bmiHeader.biClrUsed = 0;

  // First call with a NULL buffer only fills in biSizeImage so we know how
  // much to allocate.
  int ret = GetDIBits(hdc, bitmap, 0, 0, NULL, &bitmap_info, DIB_RGB_COLORS);
  if (!ret || bitmap_info.bmiHeader.biSizeImage <= 0) {
    NOTREACHED() << "Failed to read bitmap size";
    surface->ReleaseDC(hdc);
    return false;
  }

  bits->reset(new char[bitmap_info.bmiHeader.biSizeImage]);
  // Second call copies the actual pixel data.
  ret = GetDIBits(hdc, bitmap, 0, bitmap_basic_info.bmHeight, bits->get(),
                  &bitmap_info, DIB_RGB_COLORS);
  if (!ret) {
    NOTREACHED() << "Failed to retrieve bitmap bits.";
  }
  surface->ReleaseDC(hdc);
  return !!ret;
}
805
// Releases all decoder resources and returns to kUninitialized. Order
// matters: COM objects must be released before MFShutdown() tears down the
// Media Foundation platform.
void DXVAVideoDecodeAccelerator::Invalidate() {
  if (state_ == kUninitialized)
    return;
  ClearState();
  decoder_.Release();
  device_.Release();
  device_manager_.Release();
  MFShutdown();
  state_ = kUninitialized;
}
816
817 void DXVAVideoDecodeAccelerator::NotifyInitializeDone() {
818 if (client_)
819 client_->NotifyInitializeDone();
820 }
821
822 void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) {
823 if (client_)
824 client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
825 }
826
827 void DXVAVideoDecodeAccelerator::NotifyFlushDone() {
828 if (client_)
829 client_->NotifyFlushDone();
830 }
831
832 void DXVAVideoDecodeAccelerator::NotifyResetDone() {
833 if (client_)
834 client_->NotifyResetDone();
835 }
836
837 void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) {
838 // This task could execute after the decoder has been torn down.
839 if (state_ != kUninitialized && client_) {
840 client_->ProvidePictureBuffers(kNumPictureBuffers,
841 gfx::Size(width, height));
842 }
843 }
844
845 void DXVAVideoDecodeAccelerator::NotifyPictureReady(
846 const media::Picture& picture) {
847 // This task could execute after the decoder has been torn down.
848 if (state_ != kUninitialized && client_)
849 client_->PictureReady(picture);
850 }
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698