Chromium Code Reviews
Side by Side Diff: media/mf/mft_h264_decoder.cc

Issue 3156046: Changed mft_h264_decoder's API to match with video_decode_engine.h. Also chan... (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src/
Patch Set: Created 10 years, 4 months ago
1 // Copyright (c) 2010 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2010 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/mf/mft_h264_decoder.h" 5 #include "build/build_config.h" // For OS_WIN.
6 6
7 #include <algorithm> 7 #if defined(OS_WIN)
8 #include <string>
9 8
10 #include <d3d9.h> 9 #include <d3d9.h>
10 #include <dxva2api.h>
11 #include <evr.h> 11 #include <evr.h>
12 #include <initguid.h> 12 #include <initguid.h>
13 #include <mfapi.h> 13 #include <mfapi.h>
14 #include <mferror.h> 14 #include <mferror.h>
15 #include <mfidl.h>
16 #include <shlwapi.h>
17 #include <wmcodecdsp.h> 15 #include <wmcodecdsp.h>
18 16
19 #include "base/callback.h" 17 #include "base/time.h"
20 #include "base/logging.h"
21 #include "base/message_loop.h" 18 #include "base/message_loop.h"
22 #include "base/scoped_comptr_win.h" 19 #include "media/mf/mft_h264_decoder.h"
23 #include "media/base/data_buffer.h"
24 #include "media/base/video_frame.h"
25 20
21 #pragma comment(lib, "delayimp")
Alpha Left Google 2010/08/24 23:03:33 What is this new lib for?
imcheng 2010/08/24 23:40:51 Not sure why it was there. Removed it.
22 #pragma comment(lib, "dxva2.lib")
26 #pragma comment(lib, "d3d9.lib") 23 #pragma comment(lib, "d3d9.lib")
27 #pragma comment(lib, "dxva2.lib") 24 #pragma comment(lib, "mf.lib")
28 #pragma comment(lib, "evr.lib")
29 #pragma comment(lib, "mfuuid.lib")
30 #pragma comment(lib, "mfplat.lib") 25 #pragma comment(lib, "mfplat.lib")
26 #pragma comment(lib, "strmiids.lib")
Alpha Left Google 2010/08/24 23:03:33 What is this new lib for?
imcheng 2010/08/24 23:40:51 Needed for MR_BUFFER_SERVICE (getting d3d surface
31 27
32 namespace media { 28 namespace {
33
34 // Returns Media Foundation's H.264 decoder as an MFT, or NULL if not found
35 // (e.g. not using Windows 7)
36 static IMFTransform* GetH264Decoder() {
37 // Use __uuidof() to avoid linking to a library just for the CLSID.
38 IMFTransform* dec;
39 HRESULT hr = CoCreateInstance(__uuidof(CMSH264DecoderMFT), NULL,
40 CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&dec));
41 if (FAILED(hr)) {
42 LOG(ERROR) << "CoCreateInstance failed " << std::hex << std::showbase << hr;
43 return NULL;
44 }
45 return dec;
46 }
47 29
48 // Creates an empty Media Foundation sample with no buffers. 30 // Creates an empty Media Foundation sample with no buffers.
49 static IMFSample* CreateEmptySample() { 31 static IMFSample* CreateEmptySample() {
50 HRESULT hr; 32 HRESULT hr;
51 ScopedComPtr<IMFSample> sample; 33 ScopedComPtr<IMFSample> sample;
52 hr = MFCreateSample(sample.Receive()); 34 hr = MFCreateSample(sample.Receive());
53 if (FAILED(hr)) { 35 if (FAILED(hr)) {
54 LOG(ERROR) << "Unable to create an empty sample"; 36 LOG(ERROR) << "Unable to create an empty sample";
55 return NULL; 37 return NULL;
56 } 38 }
57 return sample.Detach(); 39 return sample.Detach();
58 } 40 }
59 41
60 // Creates a Media Foundation sample with one buffer of length |buffer_length| 42 // Creates a Media Foundation sample with one buffer of length |buffer_length|
61 // on an |align|-byte boundary. |align| must be a power of 2 or 0. 43 // on an |align|-byte boundary. |align| must be a power of 2 or 0.
62 // If |align| is 0, then no alignment is specified. 44 // If |align| is 0, then no alignment is specified.
63 static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) { 45 static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
64 CHECK_GT(buffer_length, 0); 46 CHECK_GT(buffer_length, 0);
65 ScopedComPtr<IMFSample> sample; 47 ScopedComPtr<IMFSample> sample;
66 sample.Attach(CreateEmptySample()); 48 sample.Attach(CreateEmptySample());
67 if (!sample.get()) 49 if (!sample.get())
68 return NULL; 50 return NULL;
69 ScopedComPtr<IMFMediaBuffer> buffer; 51 ScopedComPtr<IMFMediaBuffer> buffer;
70 HRESULT hr; 52 HRESULT hr;
71 if (align == 0) { 53 if (align == 0) {
72 // Note that MFCreateMemoryBuffer is the same as MFCreateAlignedMemoryBuffer 54 // Note that MFCreateMemoryBuffer is the same as MFCreateAlignedMemoryBuffer
73 // with the align argument being 0. 55 // with the align argument being 0.
74 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive()); 56 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
75 } else { 57 } else {
76 hr = MFCreateAlignedMemoryBuffer(buffer_length, align-1, buffer.Receive()); 58 hr = MFCreateAlignedMemoryBuffer(buffer_length,
59 align - 1,
60 buffer.Receive());
77 } 61 }
78 if (FAILED(hr)) { 62 if (FAILED(hr)) {
79 LOG(ERROR) << "Unable to create an empty buffer"; 63 LOG(ERROR) << "Unable to create an empty buffer";
80 return NULL; 64 return NULL;
81 } 65 }
82 hr = sample->AddBuffer(buffer.get()); 66 hr = sample->AddBuffer(buffer.get());
83 if (FAILED(hr)) { 67 if (FAILED(hr)) {
84 LOG(ERROR) << "Failed to add empty buffer to sample"; 68 LOG(ERROR) << "Failed to add empty buffer to sample";
85 return NULL; 69 return NULL;
86 } 70 }
(...skipping 53 matching lines...)
140 CHECK(SUCCEEDED(buffer->Unlock())); 124 CHECK(SUCCEEDED(buffer->Unlock()));
141 hr = buffer->SetCurrentLength(size); 125 hr = buffer->SetCurrentLength(size);
142 if (FAILED(hr)) { 126 if (FAILED(hr)) {
143 LOG(ERROR) << "Failed to set current length to " << size; 127 LOG(ERROR) << "Failed to set current length to " << size;
144 return NULL; 128 return NULL;
145 } 129 }
146 LOG(INFO) << __FUNCTION__ << " wrote " << size << " bytes into input sample"; 130 LOG(INFO) << __FUNCTION__ << " wrote " << size << " bytes into input sample";
147 return sample.Detach(); 131 return sample.Detach();
148 } 132 }
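
For reference, a minimal sketch of how the sample helpers above combine (not part of the patch; |data|, |size|, |timestamp_us|, and |duration_us| are hypothetical locals, and the size/alignment values come from GetInputStreamInfo()):

  // Media Foundation timestamps and durations are in 100-ns units, so
  // microsecond values are multiplied by 10 before wrapping.
  ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateInputSample(data, size,
                                  timestamp_us * 10,  // us -> 100 ns
                                  duration_us * 10,
                                  input_stream_info_.cbSize,
                                  input_stream_info_.cbAlignment));
  if (!sample.get())
    LOG(ERROR) << "Failed to wrap input data into an IMFSample";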
149 133
150 // Public methods 134 } // namespace
135
136 namespace media {
137
138 // public methods
151 139
152 MftH264Decoder::MftH264Decoder(bool use_dxva) 140 MftH264Decoder::MftH264Decoder(bool use_dxva)
153 : read_input_callback_(NULL), 141 : use_dxva_(use_dxva),
154 output_avail_callback_(NULL), 142 d3d9_(NULL),
155 output_error_callback_(NULL), 143 device_(NULL),
144 device_manager_(NULL),
145 device_window_(NULL),
156 decoder_(NULL), 146 decoder_(NULL),
157 initialized_(false), 147 input_stream_info_(),
158 use_dxva_(use_dxva), 148 output_stream_info_(),
159 drain_message_sent_(false), 149 state_(kUninitialized),
160 next_frame_discontinuous_(false), 150 event_handler_(NULL) {
161 in_buffer_size_(0), 151 memset(&config_, 0, sizeof(config_));
162 in_buffer_alignment_(0), 152 memset(&info_, 0, sizeof(info_));
163 out_buffer_size_(0),
164 out_buffer_alignment_(0),
165 frames_read_(0),
166 frames_decoded_(0),
167 width_(0),
168 height_(0),
169 stride_(0),
170 output_format_(use_dxva ? MFVideoFormat_NV12 : MFVideoFormat_YV12) {
171 } 153 }
172 154
173 MftH264Decoder::~MftH264Decoder() { 155 MftH264Decoder::~MftH264Decoder() {
174 // |decoder_| has to be destroyed before the library uninitialization.
175 if (decoder_)
176 decoder_->Release();
177 if (FAILED(MFShutdown())) {
178 LOG(WARNING) << "Warning: MF failed to shutdown";
179 }
180 CoUninitialize();
181 } 156 }
182 157
183 bool MftH264Decoder::Init(IDirect3DDeviceManager9* dev_manager, 158 void MftH264Decoder::Initialize(
184 int frame_rate_num, int frame_rate_denom, 159 MessageLoop* message_loop,
185 int width, int height, 160 VideoDecodeEngine::EventHandler* event_handler,
186 int aspect_num, int aspect_denom, 161 const VideoCodecConfig& config) {
187 ReadInputCallback* read_input_cb, 162 LOG(INFO) << "MftH264Decoder::Initialize";
188 OutputReadyCallback* output_avail_cb, 163 if (state_ != kUninitialized) {
189 OutputErrorCallback* output_error_cb) { 164 LOG(ERROR) << "Initialize: invalid state";
190 if (initialized_) 165 return;
191 return true;
192 if (!read_input_cb || !output_avail_cb || !output_error_cb) {
193 LOG(ERROR) << "Callbacks missing in Init";
194 return false;
195 } 166 }
196 read_input_callback_.reset(read_input_cb); 167 if (!message_loop || !event_handler) {
197 output_avail_callback_.reset(output_avail_cb); 168 LOG(ERROR) << "MftH264Decoder::Initialize: parameters cannot be NULL";
198 output_error_callback_.reset(output_error_cb); 169 return;
199 if (!InitComMfLibraries()) 170 }
200 return false;
201 if (!InitDecoder(dev_manager, frame_rate_num, frame_rate_denom,
202 width, height, aspect_num, aspect_denom))
203 return false;
204 if (!GetStreamsInfoAndBufferReqs())
205 return false;
206 if (!SendStartMessage())
207 return false;
208 initialized_ = true;
209 return true;
210 }
211 171
212 static const char* const ProcessOutputStatusToCString(HRESULT hr) { 172 config_ = config;
213 if (hr == MF_E_TRANSFORM_STREAM_CHANGE) 173 event_handler_ = event_handler;
214 return "media stream change occurred, need to set output type";
215 if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
216 return "decoder needs more samples";
217 else
218 return "unhandled error from ProcessOutput";
219 }
220 174
221 void MftH264Decoder::GetOutput() { 175 info_.provides_buffers_ = false;
222 CHECK(initialized_);
223 176
224 ScopedComPtr<IMFSample> output_sample; 177 // TODO(jiesun): Actually it is more likely an NV12 D3DSuface9.
225 if (!use_dxva_) { 178 // Until we had hardware composition working.
226 // If DXVA is enabled, the decoder will allocate the sample for us. 179 if (use_dxva_) {
227 output_sample.Attach(CreateEmptySampleWithBuffer(out_buffer_size_, 180 info_.stream_info_.surface_format_ = VideoFrame::YV12;
228 out_buffer_alignment_)); 181 info_.stream_info_.surface_type_ = VideoFrame::TYPE_SYSTEM_MEMORY;
229 if (!output_sample.get()) { 182 } else {
230 LOG(ERROR) << "GetSample: failed to create empty output sample"; 183 info_.stream_info_.surface_format_ = VideoFrame::YV12;
231 output_error_callback_->Run(kNoMemory); 184 info_.stream_info_.surface_type_ = VideoFrame::TYPE_SYSTEM_MEMORY;
232 return;
233 }
234 } 185 }
235 MFT_OUTPUT_DATA_BUFFER output_data_buffer;
236 HRESULT hr;
237 DWORD status;
238 for (;;) {
239 output_data_buffer.dwStreamID = 0;
240 output_data_buffer.pSample = output_sample.get();
241 output_data_buffer.dwStatus = 0;
242 output_data_buffer.pEvents = NULL;
243 hr = decoder_->ProcessOutput(0, // No flags
244 1, // # of out streams to pull from
245 &output_data_buffer,
246 &status);
247 IMFCollection* events = output_data_buffer.pEvents;
248 if (events) {
249 LOG(INFO) << "Got events from ProcessOutput, but discarding";
250 events->Release();
251 }
252 if (FAILED(hr)) {
253 LOG(INFO) << "ProcessOutput failed with status " << std::hex << hr
254 << ", meaning..." << ProcessOutputStatusToCString(hr);
255 if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
256 if (!SetDecoderOutputMediaType(output_format_)) {
257 LOG(ERROR) << "Failed to reset output type";
258 output_error_callback_->Run(kResetOutputStreamFailed);
259 return;
260 } else {
261 LOG(INFO) << "Reset output type done";
262 continue;
263 }
264 } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
265 // If we have read everything then we should've sent a drain message
266 // to the MFT. If the drain message is sent but it doesn't give out
267 // anymore output then we know the decoder has processed everything.
268 if (drain_message_sent_) {
269 LOG(INFO) << "Drain message was already sent + no output => done";
270 output_error_callback_->Run(kNoMoreOutput);
271 return;
272 } else {
273 if (!ReadInput()) {
274 LOG(INFO) << "Failed to read/process input. Sending drain message";
275 if (!SendEndOfStreamMessage() || !SendDrainMessage()) {
276 LOG(ERROR) << "Failed to send drain message";
277 output_error_callback_->Run(kNoMoreOutput);
278 return;
279 }
280 }
281 continue;
282 }
283 } else {
284 output_error_callback_->Run(kUnspecifiedError);
285 return;
286 }
287 } else {
288 // A decoded sample was successfully obtained.
289 LOG(INFO) << "Got a decoded sample from decoder";
290 if (use_dxva_) {
291 // If dxva is enabled, we did not provide a sample to ProcessOutput,
292 // i.e. output_sample is NULL.
293 output_sample.Attach(output_data_buffer.pSample);
294 if (!output_sample.get()) {
295 LOG(ERROR) << "Output sample using DXVA is NULL - ProcessOutput did "
296 << "not provide it!";
297 output_error_callback_->Run(kOutputSampleError);
298 return;
299 }
300 }
301 int64 timestamp, duration;
302 hr = output_sample->GetSampleTime(&timestamp);
303 hr = output_sample->GetSampleDuration(&duration);
304 if (FAILED(hr)) {
305 LOG(ERROR) << "Failed to get sample duration or timestamp "
306 << std::hex << hr;
307 output_error_callback_->Run(kOutputSampleError);
308 return;
309 }
310 186
311 // The duration and timestamps are in 100-ns units, so divide by 10 187 // codec_info.stream_info_.surface_width_/height_ are initialized
312 // to convert to microseconds. 188 // in InitInternal().
313 timestamp /= 10; 189 info_.success_ = InitInternal();
314 duration /= 10; 190 if (info_.success_) {
315 191 state_ = kNormal;
316 // Sanity checks for checking if there is really something in the sample. 192 event_handler_->OnInitializeComplete(info_);
317 DWORD buf_count; 193 } else {
318 hr = output_sample->GetBufferCount(&buf_count); 194 LOG(ERROR) << "MftH264Decoder::Initialize failed";
319 if (FAILED(hr)) {
320 LOG(ERROR) << "Failed to get buff count, hr = " << std::hex << hr;
321 output_error_callback_->Run(kOutputSampleError);
322 return;
323 }
324 if (buf_count == 0) {
325 LOG(ERROR) << "buf_count is 0, dropping sample";
326 output_error_callback_->Run(kOutputSampleError);
327 return;
328 }
329 ScopedComPtr<IMFMediaBuffer> out_buffer;
330 hr = output_sample->GetBufferByIndex(0, out_buffer.Receive());
331 if (FAILED(hr)) {
332 LOG(ERROR) << "Failed to get decoded output buffer";
333 output_error_callback_->Run(kOutputSampleError);
334 return;
335 }
336
337 // To obtain the data, the caller should call the Lock() method instead
338 // of using the data field.
339 // In NV12, there are only 2 planes - the Y plane, and the interleaved UV
340 // plane. Both have the same strides.
341 uint8* null_data[3] = { NULL, NULL, NULL };
342 int32 uv_stride = output_format_ == MFVideoFormat_NV12 ? stride_
343 : stride_ / 2;
344 int32 strides[3] = { stride_, uv_stride, uv_stride };
345 scoped_refptr<VideoFrame> decoded_frame;
346 VideoFrame::CreateFrameExternal(
347 use_dxva_ ? VideoFrame::TYPE_DIRECT3DSURFACE :
348 VideoFrame::TYPE_MFBUFFER,
349 output_format_ == MFVideoFormat_NV12 ? VideoFrame::NV12
350 : VideoFrame::YV12,
351 width_,
352 height_,
353 2,
354 null_data,
355 strides,
356 base::TimeDelta::FromMicroseconds(timestamp),
357 base::TimeDelta::FromMicroseconds(duration),
358 out_buffer.Detach(),
359 &decoded_frame);
360 CHECK(decoded_frame.get());
361 frames_decoded_++;
362 output_avail_callback_->Run(decoded_frame);
363 return;
364 }
365 } 195 }
366 } 196 }
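
For reference, a sketch of the calling sequence the new VideoDecodeEngine-style API implies (the handler class and buffer names are hypothetical; the actual wiring belongs to the GPU video service):

  class SampleHandler : public VideoDecodeEngine::EventHandler {
    // Receives OnInitializeComplete(info), OnFormatChange(stream_info),
    // OnEmptyBufferCallback(buffer), OnFillBufferCallback(frame), OnError().
  };

  MftH264Decoder decoder(true);  // true => DXVA-backed decoding
  SampleHandler handler;
  VideoCodecConfig config;
  decoder.Initialize(MessageLoop::current(), &handler, config);
  decoder.EmptyThisBuffer(encoded_buffer);  // compressed H.264 in
  // Decoded frames are delivered through handler.OnFillBufferCallback().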
367 197
368 bool MftH264Decoder::Flush() { 198 void MftH264Decoder::Uninitialize() {
369 CHECK(initialized_); 199 LOG(INFO) << "MftH264Decoder::Uninitialize";
370 HRESULT hr = decoder_->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL); 200 if (state_ == kUninitialized) {
371 if (FAILED(hr)) { 201 LOG(ERROR) << "Uninitialize: invalid state";
372 LOG(ERROR) << "Failed to send the flush message to decoder"; 202 return;
373 return false;
374 } 203 }
375 next_frame_discontinuous_ = true; 204
376 return true; 205 // TODO(imcheng):
206 // Cannot shut down the COM libraries here because the COM objects still
207 // need to be Release()'d. We can explicitly release them here, or move the
208 // uninitialize to GpuVideoService...
209 if (device_window_)
210 DestroyWindow(device_window_);
211 decoder_.Release();
212 device_manager_.Release();
213 device_.Release();
214 d3d9_.Release();
215 ShutdownComLibraries();
216 state_ = kUninitialized;
217 event_handler_->OnUninitializeComplete();
377 } 218 }
378 219
379 // Private methods 220 void MftH264Decoder::Flush() {
221 LOG(INFO) << "MftH264Decoder::Flush";
222 if (state_ != kNormal) {
223 LOG(ERROR) << "Flush: invalid state";
224 return;
225 }
226 state_ = kFlushing;
227 if (!SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH)) {
228 LOG(WARNING) << "MftH264Decoder::Flush failed to send message";
229 }
230 state_ = kNormal;
231 event_handler_->OnFlushComplete();
232 }
380 233
381 bool MftH264Decoder::InitComMfLibraries() { 234 void MftH264Decoder::Seek() {
235 if (state_ != kNormal) {
236 LOG(ERROR) << "Seek: invalid state";
237 return;
238 }
239 LOG(INFO) << "MftH264Decoder::Seek";
240 // Seek not implemented.
241 event_handler_->OnSeekComplete();
242 }
243
244 void MftH264Decoder::EmptyThisBuffer(scoped_refptr<Buffer> buffer) {
245 LOG(INFO) << "MftH264Decoder::EmptyThisBuffer";
246 if (state_ == kUninitialized) {
247 LOG(ERROR) << "EmptyThisBuffer: invalid state";
248 return;
}
249 ScopedComPtr<IMFSample> sample;
250 if (!buffer->IsEndOfStream()) {
251 sample.Attach(
252 CreateInputSample(buffer->GetData(),
253 buffer->GetDataSize(),
254 buffer->GetTimestamp().InMicroseconds() * 10,
255 buffer->GetDuration().InMicroseconds() * 10,
256 input_stream_info_.cbSize,
257 input_stream_info_.cbAlignment));
258 if (!sample.get()) {
259 LOG(ERROR) << "Failed to create an input sample";
260 } else {
261 if (FAILED(decoder_->ProcessInput(0, sample.get(), 0))) {
262 event_handler_->OnError();
263 }
264 }
265 } else {
266 if (state_ != MftH264Decoder::kEosDrain) {
267 // End of stream, send drain messages.
268 if (!SendMFTMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM) ||
269 !SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN)) {
270 LOG(ERROR) << "Failed to send EOS / drain messages to MFT";
271 event_handler_->OnError();
272 } else {
273 state_ = MftH264Decoder::kEosDrain;
274 }
275 }
276 }
277 DoDecode();
278 }
279
280 void MftH264Decoder::FillThisBuffer(scoped_refptr<VideoFrame> frame) {
281 LOG(INFO) << "MftH264Decoder::FillThisBuffer";
282 if (state_ == kUninitialized) {
283 LOG(ERROR) << "FillThisBuffer: invalid state";
284 return;
285 }
286 scoped_refptr<Buffer> buffer;
287 event_handler_->OnEmptyBufferCallback(buffer);
288 }
289
290 // private methods
291
292 // static
293 bool MftH264Decoder::StartupComLibraries() {
382 HRESULT hr; 294 HRESULT hr;
383 hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE); 295 hr = CoInitializeEx(NULL,
296 COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
384 if (FAILED(hr)) { 297 if (FAILED(hr)) {
385 LOG(ERROR) << "CoInit fail"; 298 LOG(ERROR) << "CoInit fail";
386 return false; 299 return false;
387 } 300 }
301
388 hr = MFStartup(MF_VERSION, MFSTARTUP_FULL); 302 hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
389 if (FAILED(hr)) { 303 if (FAILED(hr)) {
390 LOG(ERROR) << "MFStartup fail"; 304 LOG(ERROR) << "MFStartup fail";
391 CoUninitialize(); 305 CoUninitialize();
392 return false; 306 return false;
393 } 307 }
394 return true; 308 return true;
395 } 309 }
396 310
397 bool MftH264Decoder::InitDecoder(IDirect3DDeviceManager9* dev_manager, 311 // static
398 int frame_rate_num, int frame_rate_denom, 312 void MftH264Decoder::ShutdownComLibraries() {
399 int width, int height, 313 HRESULT hr;
400 int aspect_num, int aspect_denom) { 314 hr = MFShutdown();
401 decoder_ = GetH264Decoder(); 315 if (FAILED(hr)) {
402 if (!decoder_) 316 LOG(WARNING) << "Warning: MF failed to shutdown";
317 }
318 CoUninitialize();
319 }
320
321 bool MftH264Decoder::CreateD3DDevManager() {
322 d3d9_.Attach(Direct3DCreate9(D3D_SDK_VERSION));
323 if (d3d9_.get() == NULL) {
324 LOG(ERROR) << "Failed to create D3D9";
403 return false; 325 return false;
404 if (use_dxva_ && !SetDecoderD3d9Manager(dev_manager)) 326 }
327 static const TCHAR windowName[] = TEXT("MFT Decoder Hidden Window");
328 static const TCHAR className[] = TEXT("STATIC");
329 device_window_ = CreateWindowEx(WS_EX_NOACTIVATE,
330 className,
331 windowName,
332 WS_DISABLED | WS_POPUP,
333 0, 0, 1, 1,
334 HWND_MESSAGE,
335 NULL,
336 GetModuleHandle(NULL),
337 NULL);
338 CHECK(device_window_);
339
340 D3DPRESENT_PARAMETERS present_params = {0};
341 present_params.BackBufferWidth = 1;
342 present_params.BackBufferHeight = 1;
343 present_params.BackBufferFormat = D3DFMT_UNKNOWN;
344 present_params.BackBufferCount = 1;
345 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
346 present_params.hDeviceWindow = device_window_;
347 present_params.Windowed = TRUE;
348 present_params.Flags = D3DPRESENTFLAG_VIDEO;
349 present_params.FullScreen_RefreshRateInHz = 0;
350 present_params.PresentationInterval = 0;
351
352 // D3DCREATE_HARDWARE_VERTEXPROCESSING specifies hardware vertex processing.
353 // (Is it even needed for just video decoding?)
354 HRESULT hr = d3d9_->CreateDevice(D3DADAPTER_DEFAULT,
355 D3DDEVTYPE_HAL,
356 device_window_,
357 D3DCREATE_HARDWARE_VERTEXPROCESSING,
358 &present_params,
359 device_.Receive());
360 if (FAILED(hr)) {
361 LOG(ERROR) << "Failed to create D3D Device";
405 return false; 362 return false;
406 if (!SetDecoderMediaTypes(frame_rate_num, frame_rate_denom, 363 }
407 width, height, 364
408 aspect_num, aspect_denom)) { 365 UINT dev_manager_reset_token = 0;
366 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token,
367 device_manager_.Receive());
368 if (FAILED(hr)) {
369 LOG(ERROR) << "Couldn't create D3D Device manager";
370 return false;
371 }
372
373 hr = device_manager_->ResetDevice(device_.get(),
374 dev_manager_reset_token);
375 if (FAILED(hr)) {
376 LOG(ERROR) << "Failed to set device to device manager";
409 return false; 377 return false;
410 } 378 }
411 return true; 379 return true;
412 } 380 }
413 381
414 bool MftH264Decoder::SetDecoderD3d9Manager( 382 bool MftH264Decoder::InitInternal() {
415 IDirect3DDeviceManager9* dev_manager) { 383 if (!StartupComLibraries())
416 if (!use_dxva_) { 384 return false;
417 LOG(ERROR) << "SetDecoderD3d9Manager should only be called if DXVA is " 385 if (use_dxva_ && !CreateD3DDevManager())
418 << "enabled"; 386 return false;
387 if (!InitDecoder())
388 return false;
389 if (!GetStreamsInfoAndBufferReqs())
390 return false;
391 return SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING);
392 }
393
394 bool MftH264Decoder::InitDecoder() {
395 // TODO(jiesun): use MFEnum to get decoder CLSID.
396 HRESULT hr = CoCreateInstance(__uuidof(CMSH264DecoderMFT),
397 NULL,
398 CLSCTX_INPROC_SERVER,
399 __uuidof(IMFTransform),
400 reinterpret_cast<void**>(decoder_.Receive()));
401 if (FAILED(hr) || !decoder_.get()) {
402 LOG(ERROR) << "CoCreateInstance failed " << std::hex << std::showbase << hr;
419 return false; 403 return false;
420 } 404 }
421 if (!dev_manager) { 405
422 LOG(ERROR) << "dev_manager cannot be NULL"; 406 if (!CheckDecoderDxvaSupport())
407 return false;
408
409 if (use_dxva_) {
410 hr = decoder_->ProcessMessage(
411 MFT_MESSAGE_SET_D3D_MANAGER,
412 reinterpret_cast<ULONG_PTR>(device_manager_.get()));
413 if (FAILED(hr)) {
414 LOG(ERROR) << "Failed to set D3D9 device to decoder";
415 return false;
416 }
417 }
418
419 return SetDecoderMediaTypes();
420 }
421
422 bool MftH264Decoder::CheckDecoderDxvaSupport() {
423 ScopedComPtr<IMFAttributes> attributes;
424 HRESULT hr = decoder_->GetAttributes(attributes.Receive());
425 if (FAILED(hr)) {
426 LOG(ERROR) << "Failed to get attributes, hr = "
427 << std::hex << std::showbase << hr;
423 return false; 428 return false;
424 } 429 }
425 HRESULT hr; 430
426 hr = decoder_->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER, 431 UINT32 dxva;
427 reinterpret_cast<ULONG_PTR>(dev_manager)); 432 hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
428 if (FAILED(hr)) { 433 if (FAILED(hr) || !dxva) {
429 LOG(ERROR) << "Failed to set D3D9 device to decoder"; 434 LOG(ERROR) << "Failed to get DXVA attr, hr = "
435 << std::hex << std::showbase << hr
436 << "this might not be the right decoder.";
430 return false; 437 return false;
431 } 438 }
432 return true; 439 return true;
433 } 440 }
434 441
435 bool MftH264Decoder::SetDecoderMediaTypes(int frame_rate_num, 442 bool MftH264Decoder::SetDecoderMediaTypes() {
436 int frame_rate_denom, 443 if (!SetDecoderInputMediaType())
437 int width, int height,
438 int aspect_num, int aspect_denom) {
439 DCHECK(decoder_);
440 if (!SetDecoderInputMediaType(frame_rate_num, frame_rate_denom,
441 width, height,
442 aspect_num, aspect_denom))
443 return false; 444 return false;
444 if (!SetDecoderOutputMediaType(output_format_)) { 445 return SetDecoderOutputMediaType(use_dxva_ ? MFVideoFormat_NV12
445 return false; 446 : MFVideoFormat_YV12);
446 }
447 return true;
448 } 447 }
449 448
450 bool MftH264Decoder::SetDecoderInputMediaType(int frame_rate_num, 449 bool MftH264Decoder::SetDecoderInputMediaType() {
451 int frame_rate_denom,
452 int width, int height,
453 int aspect_num,
454 int aspect_denom) {
455 ScopedComPtr<IMFMediaType> media_type; 450 ScopedComPtr<IMFMediaType> media_type;
456 HRESULT hr; 451 HRESULT hr = MFCreateMediaType(media_type.Receive());
457 hr = MFCreateMediaType(media_type.Receive());
458 if (FAILED(hr)) { 452 if (FAILED(hr)) {
459 LOG(ERROR) << "Failed to create empty media type object"; 453 LOG(ERROR) << "Failed to create empty media type object";
460 return false; 454 return false;
461 } 455 }
456
462 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); 457 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
463 if (FAILED(hr)) { 458 if (FAILED(hr)) {
464 LOG(ERROR) << "SetGUID for major type failed"; 459 LOG(ERROR) << "SetGUID for major type failed";
465 return false; 460 return false;
466 } 461 }
462
467 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); 463 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
468 if (FAILED(hr)) { 464 if (FAILED(hr)) {
469 LOG(ERROR) << "SetGUID for subtype failed"; 465 LOG(ERROR) << "SetGUID for subtype failed";
470 return false; 466 return false;
471 } 467 }
472 468
473 // Provide additional info to the decoder to avoid a format change during
474 // streaming.
475 if (frame_rate_num > 0 && frame_rate_denom > 0) {
476 hr = MFSetAttributeRatio(media_type.get(), MF_MT_FRAME_RATE,
477 frame_rate_num, frame_rate_denom);
478 if (FAILED(hr)) {
479 LOG(ERROR) << "Failed to set frame rate";
480 return false;
481 }
482 }
483 if (width > 0 && height > 0) {
484 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height);
485 if (FAILED(hr)) {
486 LOG(ERROR) << "Failed to set frame size";
487 return false;
488 }
489 }
490
491 // TODO(imcheng): Not sure about this, but this is the recommended value by
492 // MSDN.
493 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
494 MFVideoInterlace_MixedInterlaceOrProgressive);
495 if (FAILED(hr)) {
496 LOG(ERROR) << "Failed to set interlace mode";
497 return false;
498 }
499 if (aspect_num > 0 && aspect_denom > 0) {
500 hr = MFSetAttributeRatio(media_type.get(), MF_MT_PIXEL_ASPECT_RATIO,
501 aspect_num, aspect_denom);
502 if (FAILED(hr)) {
503 LOG(ERROR) << "Failed to get aspect ratio";
504 return false;
505 }
506 }
507 hr = decoder_->SetInputType(0, media_type.get(), 0); // No flags 469 hr = decoder_->SetInputType(0, media_type.get(), 0); // No flags
508 if (FAILED(hr)) { 470 if (FAILED(hr)) {
509 LOG(ERROR) << "Failed to set decoder's input type"; 471 LOG(ERROR) << "Failed to set decoder's input type";
510 return false; 472 return false;
511 } 473 }
474
512 return true; 475 return true;
513 } 476 }
514 477
515 bool MftH264Decoder::SetDecoderOutputMediaType(const GUID subtype) { 478 bool MftH264Decoder::SetDecoderOutputMediaType(const GUID subtype) {
516 DWORD i = 0; 479 DWORD i = 0;
517 IMFMediaType* out_media_type; 480 IMFMediaType* out_media_type;
518 bool found = false; 481 bool found = false;
519 while (SUCCEEDED(decoder_->GetOutputAvailableType(0, i, &out_media_type))) { 482 while (SUCCEEDED(decoder_->GetOutputAvailableType(0, i, &out_media_type))) {
520 GUID out_subtype; 483 GUID out_subtype;
521 HRESULT hr; 484 HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
522 hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
523 if (FAILED(hr)) { 485 if (FAILED(hr)) {
524 LOG(ERROR) << "Failed to GetGUID() on GetOutputAvailableType() " << i; 486 LOG(ERROR) << "Failed to GetGUID() on GetOutputAvailableType() " << i;
525 out_media_type->Release(); 487 out_media_type->Release();
526 continue; 488 continue;
527 } 489 }
528 if (out_subtype == subtype) { 490 if (out_subtype == subtype) {
529 LOG(INFO) << "|subtype| is at index "
530 << i << " in GetOutputAvailableType()";
531 hr = decoder_->SetOutputType(0, out_media_type, 0); // No flags 491 hr = decoder_->SetOutputType(0, out_media_type, 0); // No flags
532 hr = MFGetAttributeSize(out_media_type, MF_MT_FRAME_SIZE, 492 hr = MFGetAttributeSize(out_media_type, MF_MT_FRAME_SIZE,
533 reinterpret_cast<UINT32*>(&width_), 493 reinterpret_cast<UINT32*>(&info_.stream_info_.surface_width_),
534 reinterpret_cast<UINT32*>(&height_)); 494 reinterpret_cast<UINT32*>(&info_.stream_info_.surface_height_));
535 hr = MFGetStrideForBitmapInfoHeader(output_format_.Data1, 495 config_.width_ = info_.stream_info_.surface_width_;
536 width_, 496 config_.height_ = info_.stream_info_.surface_height_;
537 reinterpret_cast<LONG*>(&stride_));
538 if (FAILED(hr)) { 497 if (FAILED(hr)) {
539 LOG(ERROR) << "Failed to SetOutputType to |subtype| or obtain " 498 LOG(ERROR) << "Failed to SetOutputType to |subtype| or obtain "
540 << "width/height/stride " << std::hex << hr; 499 << "width/height " << std::hex << hr;
541 } else { 500 } else {
542 found = true;
543 out_media_type->Release(); 501 out_media_type->Release();
544 break; 502 return true;
545 } 503 }
546 } 504 }
547 i++; 505 i++;
548 out_media_type->Release(); 506 out_media_type->Release();
549 } 507 }
550 if (!found) { 508 return false;
551 LOG(ERROR) << "|subtype| was not found in GetOutputAvailableType()";
552 return false;
553 }
554 return true;
555 } 509 }
556 510
557 bool MftH264Decoder::SendStartMessage() { 511 bool MftH264Decoder::SendMFTMessage(MFT_MESSAGE_TYPE msg) {
558 HRESULT hr; 512 HRESULT hr = decoder_->ProcessMessage(msg, NULL);
559 hr = decoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL); 513 return SUCCEEDED(hr);
560 if (FAILED(hr)) {
561 LOG(ERROR) << "Process start message failed, hr = "
562 << std::hex << std::showbase << hr;
563 return false;
564 } else {
565 LOG(INFO) << "Sent a message to decoder to indicate start of stream";
566 return true;
567 }
568 } 514 }
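
For reference, the four control messages this decoder sends through SendMFTMessage() over a session, mirroring the call sites elsewhere in this file:

  SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING);  // InitInternal()
  SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH);           // Flush()
  SendMFTMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM);    // EmptyThisBuffer() at EOS
  SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN);           // EmptyThisBuffer() at EOS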
569 515
570 // Prints out info about the input/output streams and gets the minimum 516 // Prints out info about the input/output streams and gets the minimum
571 // buffer sizes for input and output samples. 517 // buffer sizes for input and output samples.
572 // The MFT will not allocate buffers for either input or output, so we have 518 // The MFT will not allocate buffers for either input or output, so we have
573 // to allocate them ourselves and make sure they are the correct size. 519 // to allocate them ourselves and make sure they are the correct size.
574 // The exception is DXVA: the decoder then allocates its own output. 520 // The exception is DXVA: the decoder then allocates its own output.
575 bool MftH264Decoder::GetStreamsInfoAndBufferReqs() { 521 bool MftH264Decoder::GetStreamsInfoAndBufferReqs() {
576 DCHECK(decoder_); 522 HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
577 HRESULT hr;
578 MFT_INPUT_STREAM_INFO input_stream_info;
579 hr = decoder_->GetInputStreamInfo(0, &input_stream_info);
580 if (FAILED(hr)) { 523 if (FAILED(hr)) {
581 LOG(ERROR) << "Failed to get input stream info"; 524 LOG(ERROR) << "Failed to get input stream info";
582 return false; 525 return false;
583 } 526 }
584 LOG(INFO) << "Input stream info: "; 527 LOG(INFO) << "Input stream info: ";
585 LOG(INFO) << "Max latency: " << input_stream_info.hnsMaxLatency; 528 LOG(INFO) << "Max latency: " << input_stream_info_.hnsMaxLatency;
586 529
587 // There should be three flags: one requiring that a whole frame be in a 530 // There should be three flags: one requiring that a whole frame be in a
588 // single sample, one requiring a single buffer per sample, and one 531 // single sample, one requiring a single buffer per sample, and one
589 // specifying a fixed sample size (cbSize). 532 // specifying a fixed sample size (cbSize).
590 LOG(INFO) << "Flags: " 533 LOG(INFO) << "Flags: "
591 << std::hex << std::showbase << input_stream_info.dwFlags; 534 << std::hex << std::showbase << input_stream_info_.dwFlags;
592 CHECK_EQ(input_stream_info.dwFlags, 0x7u); 535 CHECK_EQ(input_stream_info_.dwFlags, 0x7u);
593 LOG(INFO) << "Min buffer size: " << input_stream_info.cbSize; 536 LOG(INFO) << "Min buffer size: " << input_stream_info_.cbSize;
594 LOG(INFO) << "Max lookahead: " << input_stream_info.cbMaxLookahead; 537 LOG(INFO) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
595 LOG(INFO) << "Alignment: " << input_stream_info.cbAlignment; 538 LOG(INFO) << "Alignment: " << input_stream_info_.cbAlignment;
596 in_buffer_alignment_ = input_stream_info.cbAlignment; 539
597 in_buffer_size_ = input_stream_info.cbSize; 540 hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
598
599 MFT_OUTPUT_STREAM_INFO output_stream_info;
600 hr = decoder_->GetOutputStreamInfo(0, &output_stream_info);
601 if (FAILED(hr)) { 541 if (FAILED(hr)) {
602 LOG(ERROR) << "Failed to get output stream info"; 542 LOG(ERROR) << "Failed to get output stream info";
603 return false; 543 return false;
604 } 544 }
605 LOG(INFO) << "Output stream info: "; 545 LOG(INFO) << "Output stream info: ";
606
607 // The flags here should be the same and mean the same thing, except that 546 // The flags here should be the same and mean the same thing, except that
608 // when DXVA is enabled there is an extra 0x100 flag, meaning the decoder 547 // when DXVA is enabled there is an extra 0x100 flag, meaning the decoder
609 // will allocate its own samples. 548 // will allocate its own samples.
610 LOG(INFO) << "Flags: " 549 LOG(INFO) << "Flags: "
611 << std::hex << std::showbase << output_stream_info.dwFlags; 550 << std::hex << std::showbase << output_stream_info_.dwFlags;
612 CHECK_EQ(output_stream_info.dwFlags, use_dxva_ ? 0x107u : 0x7u); 551 CHECK_EQ(output_stream_info_.dwFlags, use_dxva_ ? 0x107u : 0x7u);
613 LOG(INFO) << "Min buffer size: " << output_stream_info.cbSize; 552 LOG(INFO) << "Min buffer size: " << output_stream_info_.cbSize;
614 LOG(INFO) << "Alignment: " << output_stream_info.cbAlignment; 553 LOG(INFO) << "Alignment: " << output_stream_info_.cbAlignment;
615 out_buffer_alignment_ = output_stream_info.cbAlignment;
616 out_buffer_size_ = output_stream_info.cbSize;
617 554
618 return true; 555 return true;
619 } 556 }
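
The magic numbers in the CHECK_EQs above decode to mftransform.h flags (values per the Windows SDK). A sketch of the input check with the flags spelled out:

  DCHECK_EQ(input_stream_info_.dwFlags,
            static_cast<DWORD>(MFT_INPUT_STREAM_WHOLE_SAMPLES |            // 0x1
                               MFT_INPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER | // 0x2
                               MFT_INPUT_STREAM_FIXED_SAMPLE_SIZE));       // 0x4
  // With DXVA, the output flags additionally carry
  // MFT_OUTPUT_STREAM_PROVIDES_SAMPLES (0x100), giving the 0x107 above.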
620 557
621 bool MftH264Decoder::ReadInput() { 558 bool MftH264Decoder::DoDecode() {
622 scoped_refptr<DataBuffer> input; 559 if (state_ != kNormal && state_ != kEosDrain) {
623 read_input_callback_->Run(&input); 560 LOG(ERROR) << "DoDecode: not in normal or drain state";
624 if (!input.get() || input->IsEndOfStream()) {
625 LOG(INFO) << "No more input";
626 return false; 561 return false;
562 }
563 scoped_refptr<VideoFrame> frame;
564 ScopedComPtr<IMFSample> output_sample;
565 if (!use_dxva_) {
566 output_sample.Attach(
567 CreateEmptySampleWithBuffer(output_stream_info_.cbSize,
568 output_stream_info_.cbAlignment));
569 if (!output_sample.get()) {
570 LOG(ERROR) << "GetSample: failed to create empty output sample";
571 event_handler_->OnError();
572 return false;
573 }
574 }
575 MFT_OUTPUT_DATA_BUFFER output_data_buffer;
576 memset(&output_data_buffer, 0, sizeof(output_data_buffer));
577 output_data_buffer.dwStreamID = 0;
578 output_data_buffer.pSample = output_sample;
579
580 DWORD status;
581 HRESULT hr = decoder_->ProcessOutput(0, // No flags
582 1, // # of out streams to pull from
583 &output_data_buffer,
584 &status);
585
586 IMFCollection* events = output_data_buffer.pEvents;
587 if (events != NULL) {
588 LOG(INFO) << "Got events from ProcessOutput, but discarding";
589 events->Release();
590 }
591
592 if (FAILED(hr)) {
593 if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
594 bool type_set = SetDecoderOutputMediaType(use_dxva_ ? MFVideoFormat_NV12
595 : MFVideoFormat_YV12);
596 if (type_set) {
597 event_handler_->OnFormatChange(info_.stream_info_);
598 return true;
599 } else {
600 event_handler_->OnError();
601 return false;
602 }
603 } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
604 if (state_ == kEosDrain) {
605 // No more output from the decoder. Notify EOS and stop playback.
606 scoped_refptr<VideoFrame> frame;
607 VideoFrame::CreateEmptyFrame(&frame);
608 event_handler_->OnFillBufferCallback(frame);
609 state_ = MftH264Decoder::kStopped;
610 return false;
611 }
612 return true;
613 } else {
614 LOG(ERROR) << "Unhandled error in DoDecode()";
615 state_ = MftH264Decoder::kStopped;
616 event_handler_->OnError();
617 return false;
618 }
619 }
620
621 // We succeeded in getting an output sample.
622 if (use_dxva_) {
623 // For DXVA we didn't provide the sample, i.e. output_sample was NULL.
624 output_sample.Attach(output_data_buffer.pSample);
625 }
626 if (!output_sample.get()) {
627 LOG(ERROR) << "ProcessOutput succeeded, but did not get a sample back";
628 event_handler_->OnError();
629 return true;
630 }
631
632 int64 timestamp = 0, duration = 0;
633 if (FAILED(output_sample->GetSampleTime(&timestamp)) ||
634 FAILED(output_sample->GetSampleDuration(&duration))) {
635 LOG(WARNING) << "Failed to get timestamp/duration from output";
636 }
637
638 // The duration and timestamps are in 100-ns units, so divide by 10
639 // to convert to microseconds.
640 timestamp /= 10;
641 duration /= 10;
642
643 // Sanity-check that there is really something in the sample.
644 DWORD buf_count;
645 hr = output_sample->GetBufferCount(&buf_count);
646 if (FAILED(hr) || buf_count != 1) {
647 LOG(ERROR) << "Failed to get buffer count, or buffer count mismatch";
648 return true;
649 }
650
651 ScopedComPtr<IMFMediaBuffer> output_buffer;
652 hr = output_sample->GetBufferByIndex(0, output_buffer.Receive());
653 if (FAILED(hr)) {
654 LOG(ERROR) << "Failed to get buffer from sample";
655 return true;
656 }
657
658 VideoFrame::CreateFrame(info_.stream_info_.surface_format_,
659 info_.stream_info_.surface_width_,
660 info_.stream_info_.surface_height_,
661 base::TimeDelta::FromMicroseconds(timestamp),
662 base::TimeDelta::FromMicroseconds(duration),
663 &frame);
664 if (!frame.get()) {
665 LOG(ERROR) << "Failed to allocate video frame";
666 event_handler_->OnError();
667 return true;
668 }
669
670 if (use_dxva_) {
671 // Temporary until we figure out how to send a D3D9 surface handle.
672 ScopedComPtr<IDirect3DSurface9> surface;
673 hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
674 IID_PPV_ARGS(surface.Receive()));
675 if (FAILED(hr))
676 return true;
677
678 // TODO(imcheng):
679 // This is causing some problems (LockRect does not work always).
680 // We won't need this when we figure out how to use the d3d
681 // surface directly.
682 // NV12 to YV12
683 D3DLOCKED_RECT d3dlocked_rect;
684 hr = surface->LockRect(&d3dlocked_rect, NULL, D3DLOCK_READONLY);
685 if (FAILED(hr)) {
686 LOG(ERROR) << "LockRect";
687 return true;
688 }
689 D3DSURFACE_DESC desc;
690 hr = surface->GetDesc(&desc);
691 if (FAILED(hr)) {
692 LOG(ERROR) << "GetDesc";
693 CHECK(SUCCEEDED(surface->UnlockRect()));
694 return true;
695 }
696
697 uint32 src_stride = d3dlocked_rect.Pitch;
698 uint32 dst_stride = config_.width_;
699 uint8* src_y = static_cast<uint8*>(d3dlocked_rect.pBits);
700 uint8* src_uv = src_y + src_stride * desc.Height;
701 uint8* dst_y = static_cast<uint8*>(frame->data(VideoFrame::kYPlane));
702 uint8* dst_u = static_cast<uint8*>(frame->data(VideoFrame::kVPlane));
703 uint8* dst_v = static_cast<uint8*>(frame->data(VideoFrame::kUPlane));
704
705 for (int y = 0; y < config_.height_; ++y) {
706 for (int x = 0; x < config_.width_; ++x) {
707 dst_y[x] = src_y[x];
708 if (!(y & 1)) {
709 if (x & 1)
710 dst_v[x>>1] = src_uv[x];
711 else
712 dst_u[x>>1] = src_uv[x];
713 }
714 }
715 dst_y += dst_stride;
716 src_y += src_stride;
717 if (!(y & 1)) {
718 src_uv += src_stride;
719 dst_v += dst_stride >> 1;
720 dst_u += dst_stride >> 1;
721 }
722 }
723 CHECK(SUCCEEDED(surface->UnlockRect()));
627 } else { 724 } else {
628 // We read an input stream, we can feed it into the decoder. 725 // Not DXVA.
629 return SendInput(input->GetData(), input->GetDataSize(), 726 uint8* src_y;
630 input->GetTimestamp().InMicroseconds() * 10, 727 DWORD max_length, current_length;
631 input->GetDuration().InMicroseconds() * 10); 728 HRESULT hr = output_buffer->Lock(&src_y, &max_length, &current_length);
632 } 729 if (FAILED(hr))
633 } 730 return true;
634 731 uint8* dst_y = static_cast<uint8*>(frame->data(VideoFrame::kYPlane));
635 bool MftH264Decoder::SendInput(const uint8* data, int size, int64 timestamp, 732
636 int64 duration) { 733 memcpy(dst_y, src_y, current_length);
637 CHECK(initialized_); 734 CHECK(SUCCEEDED(output_buffer->Unlock()));
638 CHECK(data); 735 }
639 CHECK_GT(size, 0); 736 // TODO(jiesun): non-System memory case
640 737 event_handler_->OnFillBufferCallback(frame);
641 bool current_frame_discontinuous = next_frame_discontinuous_;
642 next_frame_discontinuous_ = true;
643
644 if (drain_message_sent_) {
645 LOG(ERROR) << "Drain message was already sent, but trying to send more "
646 << "input to decoder";
647 return false;
648 }
649 ScopedComPtr<IMFSample> sample;
650 sample.Attach(CreateInputSample(data, size, timestamp, duration,
651 in_buffer_size_, in_buffer_alignment_));
652 if (!sample.get()) {
653 LOG(ERROR) << "Failed to convert input stream to sample";
654 return false;
655 }
656 HRESULT hr;
657 if (current_frame_discontinuous) {
658 hr = sample->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
659 if (FAILED(hr)) {
660 LOG(ERROR) << "Failed to set sample discontinuity " << std::hex << hr;
661 }
662 }
663 hr = decoder_->ProcessInput(0, sample.get(), 0);
664 if (FAILED(hr)) {
665 LOG(ERROR) << "Failed to ProcessInput, hr = " << std::hex << hr;
666 return false;
667 }
668 frames_read_++;
669 next_frame_discontinuous_ = false;
670 return true; 738 return true;
671 } 739 }
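
The NV12-to-YV12 copy in DoDecode() is easier to follow with the plane layout written out. A sketch of the buffer arithmetic for a width-w, height-h NV12 surface with pitch p (assuming even dimensions):

  // NV12: h rows of Y at pitch p, followed by h/2 rows of interleaved
  // chroma (U0 V0 U1 V1 ...) at the same pitch.
  uint8* src_y  = src;          // Y plane, h rows
  uint8* src_uv = src + p * h;  // interleaved UV plane, h/2 rows
  // YV12 splits the chroma into two half-width, half-height planes, so the
  // copy loop de-interleaves UV and advances the chroma pointers only on
  // every other source row.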
672 740
673 bool MftH264Decoder::SendEndOfStreamMessage() {
674 CHECK(initialized_);
675 // Send the eos message with no parameters.
676 HRESULT hr = decoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0);
677 if (FAILED(hr)) {
678 LOG(ERROR) << "Failed to send the drain message to decoder";
679 return false;
680 }
681 return true;
682 }
683
684 bool MftH264Decoder::SendDrainMessage() {
685 CHECK(initialized_);
686 if (drain_message_sent_) {
687 LOG(ERROR) << "Drain message was already sent before!";
688 return false;
689 }
690
691 // Send the drain message with no parameters.
692 HRESULT hr = decoder_->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, NULL);
693 if (FAILED(hr)) {
694 LOG(ERROR) << "Failed to send the drain message to decoder";
695 return false;
696 }
697 drain_message_sent_ = true;
698 return true;
699 }
700
701 } // namespace media 741 } // namespace media
742
743 #endif // defined(OS_WIN)
