OLD | NEW |
---|---|
(Empty) | |
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h" | |
6 | |
7 #if !defined(OS_WIN) | |
8 #error This file should only be built on Windows. | |
9 #endif // !defined(OS_WIN) | |
10 | |
11 #include <ks.h> | |
12 #include <codecapi.h> | |
13 #include <d3dx9tex.h> | |
14 #include <mfapi.h> | |
15 #include <mferror.h> | |
16 #include <wmcodecdsp.h> | |
17 | |
18 #include "base/bind.h" | |
19 #include "base/callback.h" | |
20 #include "base/debug/trace_event.h" | |
21 #include "base/logging.h" | |
22 #include "base/memory/scoped_handle.h" | |
23 #include "base/memory/scoped_ptr.h" | |
24 #include "base/message_loop.h" | |
25 #include "base/process_util.h" | |
26 #include "base/shared_memory.h" | |
27 #include "media/video/video_decode_accelerator.h" | |
28 #include "third_party/angle/include/GLES2/gl2.h" | |
29 #include "third_party/angle/include/GLES2/gl2ext.h" | |
30 | |
// We only request 5 picture buffers from the client which are used to hold the
// decoded samples. These buffers are then reused when the client tells us that
// it is done with the buffer.
static const int kNumPictureBuffers = 5;

// Set to true by LoadDecodingDlls() once every required decoder DLL has been
// successfully pre-loaded; Initialize() refuses to run while this is false.
bool DXVAVideoDecodeAccelerator::loaded_decoder_dlls_ = false;
// Evaluates |result|; on failure logs |log| and returns |ret| from the
// enclosing function. Wrapped in do { } while (0) so a call site followed by
// a semicolon behaves as a single statement, even in unbraced if/else.
#define RETURN_ON_FAILURE(result, log, ret)  \
  do {                                       \
    if (!(result)) {                         \
      DLOG(ERROR) << log;                    \
      return ret;                            \
    }                                        \
  } while (0)

// HRESULT variant of RETURN_ON_FAILURE: treats SUCCEEDED(result) as the
// condition and appends the failing HRESULT to the log message. Note: no
// trailing semicolon in the definition — call sites supply their own, and a
// semicolon here would expand to a stray empty statement at every use.
#define RETURN_ON_HR_FAILURE(result, log, ret)                        \
  RETURN_ON_FAILURE(SUCCEEDED(result),                                \
                    log << ", HRESULT: 0x" << std::hex << result,     \
                    ret)

// Like RETURN_ON_FAILURE, but also reports |error_code| to the client via
// StopOnError() before returning, which tears the decoder down.
#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret)  \
  do {                                                              \
    if (!(result)) {                                                \
      DVLOG(1) << log;                                              \
      StopOnError(error_code);                                      \
      return ret;                                                   \
    }                                                               \
  } while (0)

// HRESULT variant of RETURN_AND_NOTIFY_ON_FAILURE.
#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret)     \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result),                         \
                               log << ", HRESULT: 0x" << std::hex << result, \
                               error_code, ret)
// Creates an empty Media Foundation sample with no buffers attached.
// Returns NULL (after logging the HRESULT) on failure; on success the caller
// takes ownership of the returned sample.
static IMFSample* CreateEmptySample() {
  base::win::ScopedComPtr<IMFSample> sample;
  HRESULT hr = MFCreateSample(sample.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL);
  return sample.Detach();
}
71 | |
72 // Creates a Media Foundation sample with one buffer of length |buffer_length| | |
73 // on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0. | |
74 static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) { | |
75 CHECK_GT(buffer_length, 0); | |
76 | |
77 base::win::ScopedComPtr<IMFSample> sample; | |
78 sample.Attach(CreateEmptySample()); | |
79 | |
80 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | |
81 HRESULT hr = E_FAIL; | |
82 if (align == 0) { | |
83 // Note that MFCreateMemoryBuffer is same as MFCreateAlignedMemoryBuffer | |
84 // with the align argument being 0. | |
85 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive()); | |
86 } else { | |
87 hr = MFCreateAlignedMemoryBuffer(buffer_length, | |
88 align - 1, | |
89 buffer.Receive()); | |
90 } | |
91 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL); | |
92 | |
93 hr = sample->AddBuffer(buffer); | |
94 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); | |
95 | |
96 return sample.Detach(); | |
97 } | |
98 | |
// Creates a Media Foundation sample with one buffer containing a copy of the
// given Annex B stream data.
// If duration and sample time are not known, provide 0.
// |min_size| specifies the minimum size of the buffer (might be required by
// the decoder for input). If no alignment is required, provide 0.
// Returns NULL on failure; the caller takes ownership on success.
static IMFSample* CreateInputSample(const uint8* stream, int size,
                                    int min_size, int alignment) {
  CHECK(stream);
  CHECK_GT(size, 0);
  // The backing buffer must be at least |min_size| bytes even when the
  // payload is smaller.
  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
                                            alignment));
  RETURN_ON_FAILURE(sample, "Failed to create empty sample", NULL);

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL);

  // Lock the buffer, copy the bitstream in, then unlock before setting the
  // valid-data length.
  DWORD max_length = 0;
  DWORD current_length = 0;
  uint8* destination = NULL;
  hr = buffer->Lock(&destination, &max_length, &current_length);
  RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL);

  // A freshly created buffer should be empty and large enough for |size|.
  CHECK_EQ(current_length, 0u);
  CHECK_GE(static_cast<int>(max_length), size);
  memcpy(destination, stream, size);

  hr = buffer->Unlock();
  RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL);

  hr = buffer->SetCurrentLength(size);
  RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL);

  // Mark the sample as a clean point (key frame) so the decoder will accept
  // it as a starting point.
  hr = sample->SetUINT32(MFSampleExtension_CleanPoint, TRUE);
  RETURN_ON_HR_FAILURE(hr, "Failed to mark sample as key frame", NULL);

  return sample.Detach();
}
138 | |
// Builds an input IMFSample from a bitstream buffer handed to us by the
// renderer: duplicates the renderer's shared memory handle into this process,
// maps it, and copies the bitstream into a new Media Foundation sample.
// Returns NULL on failure; the caller takes ownership on success.
static IMFSample* CreateSampleFromInputBuffer(
    const media::BitstreamBuffer& bitstream_buffer,
    base::ProcessHandle renderer_process,
    DWORD stream_size,
    DWORD alignment) {
  HANDLE shared_memory_handle = NULL;
  RETURN_ON_FAILURE(::DuplicateHandle(renderer_process,
                                      bitstream_buffer.handle(),
                                      base::GetCurrentProcessHandle(),
                                      &shared_memory_handle,
                                      0,
                                      FALSE,
                                      DUPLICATE_SAME_ACCESS),
                    "Duplicate handle failed", NULL);

  // |shm| takes ownership of the duplicated handle; read-only mapping.
  base::SharedMemory shm(shared_memory_handle, true);
  RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()),
                    "Failed in base::SharedMemory::Map", NULL);

  return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()),
                           bitstream_buffer.size(),
                           stream_size,
                           alignment);
}
163 | |
// Wraps a client-provided picture buffer; new buffers start out available
// for receiving decoded output.
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
    const media::PictureBuffer& buffer)
    : available(true),
      picture_buffer(buffer) {
}
169 | |
// Records a decoded surface along with the id of the bitstream buffer that
// produced it, until a picture buffer becomes available to copy it into.
DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
    int32 buffer_id, IDirect3DSurface9* surface)
    : input_buffer_id(buffer_id),
      dest_surface(surface) {
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}
177 | |
178 // static | |
179 void DXVAVideoDecodeAccelerator::LoadDecodingDlls() { | |
180 // Should be called only once during program startup. | |
181 DCHECK(!!loaded_decoder_dlls_); | |
Ami GONE FROM CHROMIUM
2011/12/21 18:57:45
Why !! ??
| |
182 | |
183 static wchar_t* decoding_dlls[] = { | |
184 L"d3d9.dll", | |
185 L"d3dx9_43.dll", | |
186 L"dxva2.dll", | |
187 L"mf.dll", | |
188 L"mfplat.dll", | |
189 L"msmpeg2vdec.dll", | |
190 }; | |
191 | |
192 for (int i = 0; i < arraysize(decoding_dlls); ++i) { | |
193 if (!::LoadLibrary(decoding_dlls[i])) { | |
194 DLOG(ERROR) << "Failed to load decoder dll: " << decoding_dlls[i] | |
195 << ", Error: " << ::GetLastError(); | |
196 return; | |
197 } | |
198 } | |
199 loaded_decoder_dlls_ = true; | |
200 } | |
201 | |
// Constructs the accelerator in the kUninitialized state. |client| receives
// all notifications; |renderer_process| is needed to duplicate shared-memory
// handles for incoming bitstream buffers.
DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
    media::VideoDecodeAccelerator::Client* client,
    base::ProcessHandle renderer_process)
    : client_(client),
      state_(kUninitialized),
      pictures_requested_(false),
      renderer_process_(renderer_process),
      dev_manager_reset_token_(0),
      last_input_buffer_id_(-1),
      inputs_before_decode_(0),
      device_window_(NULL) {
}
214 | |
DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
  // Drop the client pointer so any task that races destruction does not
  // notify a dead client.
  client_ = NULL;
}
218 | |
// Brings the decoder up: verifies the DLLs are resident, starts Media
// Foundation, creates the D3D device manager, instantiates/configures the
// MFT H.264 decoder, and begins streaming. On any failure the client is
// notified via StopOnError() and false is returned.
bool DXVAVideoDecodeAccelerator::Initialize(Profile) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE(loaded_decoder_dlls_,
      "Decoder dlls not loaded", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kUninitialized),
      "Initialize: invalid state: " << state_, ILLEGAL_STATE, false);

  HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE,
      false);

  RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
      "Failed to create device manager", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(),
      "Failed to initialize decoder", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
      "Failed to get input/output stream info.", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
      "Failed to start decoder", PLATFORM_FAILURE, false);

  state_ = kNormal;
  // Posted (not called inline) so the client observes the callback after
  // Initialize() has returned.
  MessageLoop::current()->PostTask(FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyInitializeDone, this));
  return true;
}
250 | |
251 void DXVAVideoDecodeAccelerator::Decode( | |
252 const media::BitstreamBuffer& bitstream_buffer) { | |
253 DCHECK(CalledOnValidThread()); | |
254 | |
255 RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped), | |
256 "Invalid state: " << state_, ILLEGAL_STATE,); | |
257 | |
258 base::win::ScopedComPtr<IMFSample> sample; | |
259 sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer, | |
260 renderer_process_, | |
261 input_stream_info_.cbSize, | |
262 input_stream_info_.cbAlignment)); | |
263 RETURN_AND_NOTIFY_ON_FAILURE(sample, "Failed to create input sample", | |
264 PLATFORM_FAILURE,); | |
265 if (!inputs_before_decode_) { | |
266 TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.Decoding", this, ""); | |
267 } | |
268 inputs_before_decode_++; | |
269 | |
270 RETURN_AND_NOTIFY_ON_FAILURE( | |
271 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0), | |
272 "Failed to create input sample", PLATFORM_FAILURE,); | |
273 | |
274 HRESULT hr = decoder_->ProcessInput(0, sample, 0); | |
275 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample", | |
276 PLATFORM_FAILURE,); | |
277 | |
278 RETURN_AND_NOTIFY_ON_FAILURE( | |
279 SendMFTMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0), | |
280 "Failed to send eos message to MFT", PLATFORM_FAILURE,); | |
281 state_ = kEosDrain; | |
282 | |
283 last_input_buffer_id_ = bitstream_buffer.id(); | |
284 | |
285 DoDecode(); | |
286 | |
287 RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal), | |
288 "Failed to process output. Unexpected decoder state: " << state_, | |
289 ILLEGAL_STATE,); | |
290 | |
291 // The Microsoft Media foundation decoder internally buffers up to 30 frames | |
292 // before returning a decoded frame. We need to inform the client that this | |
293 // input buffer is processed as it may stop sending us further input. | |
294 // Note: This may break clients which expect every input buffer to be | |
295 // associated with a decoded output buffer. | |
296 // TODO(ananta) | |
297 // Do some more investigation into whether it is possible to get the MFT | |
298 // decoder to emit an output packet for every input packet. | |
299 // http://code.google.com/p/chromium/issues/detail?id=108121 | |
300 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
301 &DXVAVideoDecodeAccelerator::NotifyInputBufferRead, this, | |
302 bitstream_buffer.id())); | |
303 } | |
304 | |
// Receives the picture buffers the client allocated in response to
// RequestPictureBuffers() and immediately tries to drain any samples that
// were queued while waiting for them.
void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(CalledOnValidThread());
  // Copy the picture buffers provided by the client to the available list,
  // and mark these buffers as available for use.
  for (size_t buffer_index = 0; buffer_index < buffers.size();
       ++buffer_index) {
    // Buffer ids must be unique; a duplicate indicates a client bug.
    bool inserted = output_picture_buffers_.insert(std::make_pair(
        buffers[buffer_index].id(),
        DXVAPictureBuffer(buffers[buffer_index]))).second;
    DCHECK(inserted);
  }
  ProcessPendingSamples();
}
319 | |
// Called when the client is done displaying a picture buffer; marks it
// available again and drains any pending decoded samples into it.
void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(CalledOnValidThread());

  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  RETURN_AND_NOTIFY_ON_FAILURE(it != output_picture_buffers_.end(),
      "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);

  it->second.available = true;
  ProcessPendingSamples();
}
331 | |
332 void DXVAVideoDecodeAccelerator::Flush() { | |
333 DCHECK(CalledOnValidThread()); | |
334 | |
335 DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush"; | |
336 | |
337 RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped), | |
338 "Unexpected decoder state: " << state_, ILLEGAL_STATE,); | |
339 | |
340 state_ = kEosDrain; | |
341 | |
342 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0), | |
343 "Failed to send drain message", PLATFORM_FAILURE,); | |
344 | |
345 // As per MSDN docs after the client sends this message, it calls | |
346 // IMFTransform::ProcessOutput in a loop, until ProcessOutput returns the | |
347 // error code MF_E_TRANSFORM_NEED_MORE_INPUT. The DoDecode function sets | |
348 // the state to kStopped when the decoder returns | |
349 // MF_E_TRANSFORM_NEED_MORE_INPUT. | |
350 // The MFT decoder can buffer upto 30 frames worth of input before returning | |
351 // an output frame. This loop here attempts to retrieve as many output frames | |
352 // as possible from the buffered set. | |
353 while (state_ != kStopped) { | |
354 DoDecode(); | |
355 } | |
356 | |
357 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
358 &DXVAVideoDecodeAccelerator::NotifyFlushDone, this)); | |
359 | |
360 state_ = kNormal; | |
361 } | |
362 | |
// Resets the decoder by flushing the MFT, then notifies the client.
// NOTE(review): pending_output_samples_ is not cleared here — decoded output
// queued before the reset may still be delivered afterwards; confirm whether
// that is intended.
void DXVAVideoDecodeAccelerator::Reset() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Reset: invalid state: " << state_, ILLEGAL_STATE,);

  state_ = kResetting;

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
      "Reset: Failed to send message.", PLATFORM_FAILURE,);

  MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyResetDone, this));

  state_ = DXVAVideoDecodeAccelerator::kNormal;
}
381 | |
// Tears down all decoder state; after this the object is back in the
// kUninitialized state.
void DXVAVideoDecodeAccelerator::Destroy() {
  DCHECK(CalledOnValidThread());
  Invalidate();
}
386 | |
// Creates the D3D9Ex device and the DXVA device manager the MFT decoder
// uses for hardware acceleration. A hidden 1x1 window backs the device since
// we never present to screen. Returns false on failure.
bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
  base::win::ScopedComPtr<IDirect3D9Ex> d3d9;

  // A dummy window is required for device creation; it is destroyed in
  // Invalidate().
  device_window_ = CreateWindowEx(0, L"Static", NULL, WS_OVERLAPPEDWINDOW,
                                  CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT,
                                  CW_USEDEFAULT, NULL, NULL, NULL, NULL);
  RETURN_ON_FAILURE(device_window_, "Failed to create device window", false);

  HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9.Receive());
  RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);

  // Minimal windowed present parameters: we only need a device for DXVA
  // surface operations, not for on-screen rendering.
  D3DPRESENT_PARAMETERS present_params = {0};
  present_params.BackBufferWidth = 1;
  present_params.BackBufferHeight = 1;
  present_params.BackBufferFormat = D3DFMT_UNKNOWN;
  present_params.BackBufferCount = 1;
  present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
  present_params.hDeviceWindow = device_window_;
  present_params.Windowed = TRUE;
  present_params.Flags = D3DPRESENTFLAG_VIDEO;
  present_params.FullScreen_RefreshRateInHz = 0;
  present_params.PresentationInterval = 0;

  hr = d3d9->CreateDeviceEx(D3DADAPTER_DEFAULT,
                            D3DDEVTYPE_HAL,
                            device_window_,
                            D3DCREATE_SOFTWARE_VERTEXPROCESSING,
                            &present_params,
                            NULL,
                            device_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);

  hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
                                         device_manager_.Receive());
  RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);

  // Hand our device to the manager; the reset token pairs the two.
  hr = device_manager_->ResetDevice(device_, dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);
  return true;
}
427 | |
// Instantiates the Microsoft H.264 MFT decoder, checks DXVA support, hands
// it the D3D device manager, and sets the input/output media types.
bool DXVAVideoDecodeAccelerator::InitDecoder() {
  // We cannot use CoCreateInstance to instantiate the decoder object as that
  // fails in the sandbox. We mimic the steps CoCreateInstance uses to
  // instantiate the object.
  HMODULE decoder_dll = ::GetModuleHandle(L"msmpeg2vdec.dll");
  RETURN_ON_FAILURE(decoder_dll,
                    "msmpeg2vdec.dll required for decoding is not loaded",
                    false);

  // Signature of the DLL's DllGetClassObject export.
  typedef HRESULT (WINAPI* GetClassObject)(const CLSID& clsid,
                                           const IID& iid,
                                           void** object);

  GetClassObject get_class_object = reinterpret_cast<GetClassObject>(
      GetProcAddress(decoder_dll, "DllGetClassObject"));
  RETURN_ON_FAILURE(get_class_object,
                    "Failed to get DllGetClassObject pointer", false);

  base::win::ScopedComPtr<IClassFactory> factory;
  HRESULT hr = get_class_object(__uuidof(CMSH264DecoderMFT),
                                __uuidof(IClassFactory),
                                reinterpret_cast<void**>(factory.Receive()));
  RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false);

  hr = factory->CreateInstance(NULL, __uuidof(IMFTransform),
                               reinterpret_cast<void**>(decoder_.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);

  RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
                    "Failed to check decoder DXVA support", false);

  // Give the decoder access to our D3D device via the device manager so it
  // can decode directly into D3D surfaces.
  hr = decoder_->ProcessMessage(
            MFT_MESSAGE_SET_D3D_MANAGER,
            reinterpret_cast<ULONG_PTR>(device_manager_.get()));
  RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false);

  return SetDecoderMediaTypes();
}
466 | |
467 bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() { | |
468 base::win::ScopedComPtr<IMFAttributes> attributes; | |
469 HRESULT hr = decoder_->GetAttributes(attributes.Receive()); | |
470 RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false); | |
471 | |
472 UINT32 dxva = 0; | |
473 hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva); | |
474 RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false); | |
475 | |
476 hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE); | |
477 RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false); | |
478 return true; | |
479 } | |
480 | |
// Configures the decoder's input (H.264) and output (NV12) media types.
bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
  RETURN_ON_FAILURE(SetDecoderInputMediaType(),
                    "Failed to set decoder input media type", false);
  return SetDecoderOutputMediaType(MFVideoFormat_NV12);
}
486 | |
// Tells the MFT to expect H.264 video input.
bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
  base::win::ScopedComPtr<IMFMediaType> media_type;
  HRESULT hr = MFCreateMediaType(media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false);

  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);

  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false);

  hr = decoder_->SetInputType(0, media_type, 0);  // No flags
  RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false);
  return true;
}
502 | |
// Walks the decoder's advertised output types and selects the one whose
// subtype matches |subtype| (normally NV12). Returns false if the decoder
// does not offer the requested subtype.
bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType(
    const GUID& subtype) {
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(decoder_->GetOutputAvailableType(0, i,
                                                  out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false);

    if (out_subtype == subtype) {
      hr = decoder_->SetOutputType(0, out_media_type, 0);  // No flags
      RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false);
      return true;
    }
    // Release before the next GetOutputAvailableType fills the same pointer.
    out_media_type.Release();
  }
  return false;
}
524 | |
525 bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg, | |
526 int32 param) { | |
527 HRESULT hr = decoder_->ProcessMessage(msg, param); | |
528 return SUCCEEDED(hr); | |
529 } | |
530 | |
// Gets the minimum buffer sizes for input and output samples. The MFT will not
// allocate buffer for input nor output, so we have to do it ourselves and make
// sure they're the correct size. We only provide decoding if DXVA is enabled.
// NOTE(review): the CHECK_EQs below hard-code the exact flag bitmasks this
// particular decoder reports; a decoder version reporting different (but
// valid) flags would crash here — confirm this is the intended strictness.
bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
  HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);

  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);

  DVLOG(1) << "Input stream info: ";
  DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
  // There should be three flags, one for requiring a whole frame be in a
  // single sample, one for requiring there be one buffer only in a single
  // sample, and one that specifies a fixed sample size. (as in cbSize)
  CHECK_EQ(input_stream_info_.dwFlags, 0x7u);

  DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
  DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
  DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;

  DVLOG(1) << "Output stream info: ";
  // The flags here should be the same and mean the same thing, except when
  // DXVA is enabled, there is an extra 0x100 flag meaning decoder will
  // allocate its own sample.
  DVLOG(1) << "Flags: "
          << std::hex << std::showbase << output_stream_info_.dwFlags;
  CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
  DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
  DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
  return true;
}
563 | |
// Pulls one decoded sample from the MFT, handling the three ProcessOutput
// outcomes: a format change (reconfigure and recurse), need-more-input
// (transition to kStopped), or success (hand the sample to
// ProcessOutputSample and return to kNormal).
void DXVAVideoDecodeAccelerator::DoDecode() {
  // This function is also called from Flush in a loop which could result
  // in the state transitioning to kNormal due to decoded output.
  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kEosDrain),
      "DoDecode: not in normal/drain state", ILLEGAL_STATE,);

  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  DWORD status = 0;

  HRESULT hr = decoder_->ProcessOutput(0,  // No flags
                                       1,  // # of out streams to pull from
                                       &output_data_buffer,
                                       &status);
  // ProcessOutput can attach an event collection even on failure; release it
  // so it doesn't leak.
  IMFCollection* events = output_data_buffer.pEvents;
  if (events != NULL) {
    VLOG(1) << "Got events from ProcessOuput, but discarding";
    events->Release();
  }
  if (FAILED(hr)) {
    // A stream change needs further ProcessInput calls to get back decoder
    // output which is why we need to set the state to stopped.
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
      if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
        // Decoder didn't let us set NV12 output format. Not sure as to why
        // this can happen. Give up in disgust.
        NOTREACHED() << "Failed to set decoder output media type to NV12";
        state_ = kStopped;
      } else {
        DVLOG(1) << "Received output format change from the decoder."
                    " Recursively invoking DoDecode";
        DoDecode();
      }
      return;
    } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
      // No more output from the decoder. Stop playback.
      state_ = kStopped;
      return;
    } else {
      NOTREACHED() << "Unhandled error in DoDecode()";
      return;
    }
  }
  // First decoded output since the last counter reset: close the ETW span
  // opened in Decode() and record how many inputs it took.
  TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");

  TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
                 inputs_before_decode_);

  inputs_before_decode_ = 0;

  RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
      "Failed to process output sample.", PLATFORM_FAILURE,);

  state_ = kNormal;
}
618 | |
// Takes ownership of a decoded sample, converts its YUV D3D surface to an
// RGB offscreen surface, and queues it for copying into a client picture
// buffer. Requests picture buffers from the client on first use.
bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
  RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);

  // Take ownership so the sample is released on every return path.
  base::win::ScopedComPtr<IMFSample> output_sample;
  output_sample.Attach(sample);

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);

  // The DXVA decoder writes into a D3D surface; retrieve it from the buffer.
  base::win::ScopedComPtr<IDirect3DSurface9> surface;
  hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                    IID_PPV_ARGS(surface.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
                       false);

  D3DSURFACE_DESC surface_desc;
  hr = surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.SurfaceCreation", this,
                        "");
  // TODO(ananta)
  // The code below may not be necessary once we have an ANGLE extension which
  // allows us to pass the Direct 3D surface directly for rendering.

  // The decoded bits in the source direct 3d surface are in the YUV
  // format. Angle does not support that. As a workaround we create an
  // offscreen surface in the RGB format and copy the source surface
  // to this surface.
  base::win::ScopedComPtr<IDirect3DSurface9> dest_surface;
  hr = device_->CreateOffscreenPlainSurface(surface_desc.Width,
                                            surface_desc.Height,
                                            D3DFMT_A8R8G8B8,
                                            D3DPOOL_DEFAULT,
                                            dest_surface.Receive(),
                                            NULL);
  RETURN_ON_HR_FAILURE(hr, "Failed to create offscreen surface", false);

  // Performs the YUV -> RGB conversion as part of the surface copy.
  hr = D3DXLoadSurfaceFromSurface(dest_surface, NULL, NULL, surface, NULL,
                                  NULL, D3DX_DEFAULT, 0);
  RETURN_ON_HR_FAILURE(hr, "D3DXLoadSurfaceFromSurface failed", false);

  TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.SurfaceCreation", this, "");

  pending_output_samples_.push_back(
      PendingSampleInfo(last_input_buffer_id_, dest_surface));

  // If we have available picture buffers to copy the output data then use the
  // first one and then flag it as not being available for use.
  if (output_picture_buffers_.size()) {
    ProcessPendingSamples();
    return true;
  }
  if (pictures_requested_) {
    DVLOG(1) << "Waiting for picture slots from the client.";
    return true;
  }
  // Go ahead and request picture buffers.
  MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::RequestPictureBuffers,
      this, surface_desc.Width, surface_desc.Height));

  pictures_requested_ = true;
  return true;
}
685 | |
// Copies the RGB bits of |dest_surface| into the GL texture backing
// |picture_buffer| and posts a PictureReady notification pairing the picture
// with |input_buffer_id|.
bool DXVAVideoDecodeAccelerator::CopyOutputSampleDataToPictureBuffer(
    IDirect3DSurface9* dest_surface, media::PictureBuffer picture_buffer,
    int input_buffer_id) {
  DCHECK(dest_surface);

  D3DSURFACE_DESC surface_desc;
  HRESULT hr = dest_surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  scoped_array<char> bits;
  RETURN_ON_FAILURE(GetBitmapFromSurface(dest_surface, &bits),
                    "Failed to get bitmap from surface for rendering", false);

  // This function currently executes in the context of IPC handlers in the
  // GPU process which ensures that there is always a OpenGL context.
  // Save and restore the caller's texture binding around our upload.
  GLint current_texture = 0;
  glGetIntegerv(GL_TEXTURE_BINDING_2D, &current_texture);

  glBindTexture(GL_TEXTURE_2D, picture_buffer.texture_id());
  glTexImage2D(GL_TEXTURE_2D, 0, GL_BGRA_EXT, surface_desc.Width,
               surface_desc.Height, 0, GL_BGRA_EXT, GL_UNSIGNED_BYTE,
               reinterpret_cast<GLvoid*>(bits.get()));
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

  glBindTexture(GL_TEXTURE_2D, current_texture);

  media::Picture output_picture(picture_buffer.id(), input_buffer_id);
  MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyPictureReady, this, output_picture));
  return true;
}
717 | |
// Pairs queued decoded samples with available picture buffers, copying each
// sample into a buffer and marking the buffer in-use, until either queue is
// exhausted.
void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
  if (pending_output_samples_.empty())
    return;

  OutputBuffers::iterator index;

  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end() &&
       !pending_output_samples_.empty();
       ++index) {
    if (index->second.available) {
      // Oldest pending sample goes into the first available buffer.
      PendingSampleInfo sample_info = pending_output_samples_.front();

      CopyOutputSampleDataToPictureBuffer(sample_info.dest_surface,
                                          index->second.picture_buffer,
                                          sample_info.input_buffer_id);
      index->second.available = false;
      pending_output_samples_.pop_front();
    }
  }
}
739 | |
740 void DXVAVideoDecodeAccelerator::ClearState() { | |
741 last_input_buffer_id_ = -1; | |
742 output_picture_buffers_.clear(); | |
743 pending_output_samples_.clear(); | |
744 } | |
745 | |
// Reports |error| to the client (once — the client pointer is dropped
// afterwards) and tears down the decoder if it was initialized.
void DXVAVideoDecodeAccelerator::StopOnError(
    media::VideoDecodeAccelerator::Error error) {
  DCHECK(CalledOnValidThread());

  if (client_)
    client_->NotifyError(error);
  client_ = NULL;

  if (state_ != kUninitialized) {
    Invalidate();
  }
}
758 | |
// Reads back the pixels of |surface| via GDI as a device-independent bitmap
// into |bits|, suitable for uploading to a GL texture. Returns false on any
// GDI failure (releasing the surface DC on every path).
bool DXVAVideoDecodeAccelerator::GetBitmapFromSurface(
    IDirect3DSurface9* surface,
    scoped_array<char>* bits) {
  // Get the currently loaded bitmap from the DC.
  HDC hdc = NULL;
  HRESULT hr = surface->GetDC(&hdc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get HDC from surface", false);

  HBITMAP bitmap =
      reinterpret_cast<HBITMAP>(GetCurrentObject(hdc, OBJ_BITMAP));
  if (!bitmap) {
    NOTREACHED() << "Failed to get bitmap from DC";
    surface->ReleaseDC(hdc);
    return false;
  }
  // TODO(ananta)
  // The code below may not be necessary once we have an ANGLE extension which
  // allows us to pass the Direct 3D surface directly for rendering.
  // The Device dependent bitmap is upside down for OpenGL. We convert the
  // bitmap to a DIB and render it on the texture instead.
  BITMAP bitmap_basic_info = {0};
  if (!GetObject(bitmap, sizeof(BITMAP), &bitmap_basic_info)) {
    NOTREACHED() << "Failed to read bitmap info";
    surface->ReleaseDC(hdc);
    return false;
  }
  BITMAPINFO bitmap_info = {0};
  bitmap_info.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
  bitmap_info.bmiHeader.biWidth = bitmap_basic_info.bmWidth;
  bitmap_info.bmiHeader.biHeight = bitmap_basic_info.bmHeight;
  bitmap_info.bmiHeader.biPlanes = 1;
  bitmap_info.bmiHeader.biBitCount = bitmap_basic_info.bmBitsPixel;
  bitmap_info.bmiHeader.biCompression = BI_RGB;
  bitmap_info.bmiHeader.biSizeImage = 0;
  bitmap_info.bmiHeader.biClrUsed = 0;

  // First GetDIBits call (NULL buffer) fills in biSizeImage so we know how
  // much to allocate; the second call retrieves the actual pixel data.
  int ret = GetDIBits(hdc, bitmap, 0, 0, NULL, &bitmap_info, DIB_RGB_COLORS);
  if (!ret || bitmap_info.bmiHeader.biSizeImage <= 0) {
    NOTREACHED() << "Failed to read bitmap size";
    surface->ReleaseDC(hdc);
    return false;
  }

  bits->reset(new char[bitmap_info.bmiHeader.biSizeImage]);
  ret = GetDIBits(hdc, bitmap, 0, bitmap_basic_info.bmHeight, bits->get(),
                  &bitmap_info, DIB_RGB_COLORS);
  if (!ret) {
    NOTREACHED() << "Failed to retrieve bitmap bits.";
  }
  surface->ReleaseDC(hdc);
  return !!ret;
}
811 | |
// Releases every decoder resource (COM objects, Media Foundation, the dummy
// device window) and returns to the kUninitialized state. Safe to call more
// than once.
void DXVAVideoDecodeAccelerator::Invalidate() {
  if (state_ == kUninitialized)
    return;
  ClearState();
  decoder_.Release();
  device_.Release();
  device_manager_.Release();
  // Pairs with the MFStartup() call in Initialize().
  MFShutdown();
  state_ = kUninitialized;
  ::DestroyWindow(device_window_);
  device_window_ = NULL;
}
824 | |
825 void DXVAVideoDecodeAccelerator::NotifyInitializeDone() { | |
826 if (client_) | |
827 client_->NotifyInitializeDone(); | |
828 } | |
829 | |
830 void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) { | |
831 if (client_) | |
832 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | |
833 } | |
834 | |
835 void DXVAVideoDecodeAccelerator::NotifyFlushDone() { | |
836 if (client_) | |
837 client_->NotifyFlushDone(); | |
838 } | |
839 | |
840 void DXVAVideoDecodeAccelerator::NotifyResetDone() { | |
841 if (client_) | |
842 client_->NotifyResetDone(); | |
843 } | |
844 | |
// Posted from ProcessOutputSample(); asks the client for kNumPictureBuffers
// picture buffers sized to the decoded frame.
void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) {
  // This task could execute after the decoder has been torn down.
  if (state_ != kUninitialized && client_) {
    client_->ProvidePictureBuffers(kNumPictureBuffers,
                                   gfx::Size(width, height));
  }
}
852 | |
853 void DXVAVideoDecodeAccelerator::NotifyPictureReady( | |
854 const media::Picture& picture) { | |
855 // This task could execute after the decoder has been torn down. | |
856 if (state_ != kUninitialized && client_) | |
857 client_->PictureReady(picture); | |
858 } | |
OLD | NEW |