OLD | NEW |
1 // Copyright (c) 2010 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2010 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 // | 4 // |
5 // Demonstrates the use of MftH264Decoder. | 5 // Demonstrates the use of MftH264Decoder. |
6 | 6 |
7 #include <cstdio> | 7 #include <cstdio> |
8 | 8 |
9 #include <string> | 9 #include <string> |
10 | 10 |
11 #include <d3d9.h> | 11 #include <d3d9.h> |
12 #include <dxva2api.h> | 12 #include <dxva2api.h> |
13 | 13 |
14 #include "base/at_exit.h" | 14 #include "base/at_exit.h" |
15 #include "base/command_line.h" | 15 #include "base/command_line.h" |
16 #include "base/file_path.h" | 16 #include "base/file_path.h" |
17 #include "base/logging.h" | 17 #include "base/logging.h" |
18 #include "base/message_loop.h" | 18 #include "base/message_loop.h" |
19 #include "base/scoped_comptr_win.h" | 19 #include "base/scoped_comptr_win.h" |
20 #include "base/scoped_ptr.h" | 20 #include "base/scoped_ptr.h" |
21 #include "base/time.h" | 21 #include "base/time.h" |
| 22 #include "media/base/data_buffer.h" |
22 #include "media/base/media.h" | 23 #include "media/base/media.h" |
23 #include "media/base/video_frame.h" | 24 #include "media/base/video_frame.h" |
| 25 #include "media/base/yuv_convert.h" |
24 #include "media/ffmpeg/ffmpeg_common.h" | 26 #include "media/ffmpeg/ffmpeg_common.h" |
25 #include "media/ffmpeg/file_protocol.h" | 27 #include "media/ffmpeg/file_protocol.h" |
26 #include "media/mf/basic_renderer.h" | |
27 #include "media/mf/d3d_util.h" | |
28 #include "media/mf/file_reader_util.h" | 28 #include "media/mf/file_reader_util.h" |
29 #include "media/mf/mft_h264_decoder.h" | 29 #include "media/mf/mft_h264_decoder.h" |
30 | 30 |
31 using base::AtExitManager; | 31 using base::AtExitManager; |
32 using base::Time; | 32 using base::Time; |
33 using base::TimeDelta; | 33 using base::TimeDelta; |
34 using media::BasicRenderer; | 34 using media::Buffer; |
35 using media::NullRenderer; | 35 using media::DataBuffer; |
36 using media::FFmpegFileReader; | 36 using media::FFmpegFileReader; |
37 using media::MftH264Decoder; | 37 using media::MftH264Decoder; |
38 using media::MftRenderer; | 38 using media::VideoCodecConfig; |
| 39 using media::VideoCodecInfo; |
| 40 using media::VideoDecodeEngine; |
39 using media::VideoFrame; | 41 using media::VideoFrame; |
| 42 using media::VideoStreamInfo; |
40 | 43 |
41 namespace { | 44 namespace { |
42 | 45 |
43 const wchar_t* const kWindowClass = L"Chrome_H264_MFT"; | 46 const wchar_t* const kWindowClass = L"Chrome_H264_MFT"; |
44 const wchar_t* const kWindowTitle = L"H264_MFT"; | 47 const wchar_t* const kWindowTitle = L"H264_MFT"; |
45 const int kWindowStyleFlags = (WS_OVERLAPPEDWINDOW | WS_VISIBLE) & | 48 const int kWindowStyleFlags = (WS_OVERLAPPEDWINDOW | WS_VISIBLE) & |
46 ~(WS_MAXIMIZEBOX | WS_THICKFRAME); | 49 ~(WS_MAXIMIZEBOX | WS_THICKFRAME); |
47 | 50 |
48 void usage() { | 51 void usage() { |
49 static char* usage_msg = | 52 static char* usage_msg = |
50 "Usage: mft_h264_decoder [--enable-dxva] [--render] --input-file=FILE\n" | 53 "Usage: mft_h264_decoder [--enable-dxva] [--render] --input-file=FILE\n" |
51 "enable-dxva: Enables hardware accelerated decoding\n" | 54 "enable-dxva: Enables hardware accelerated decoding\n" |
52 "render: Render to window\n" | 55 "render: Render to window\n" |
53 "During rendering, press spacebar to skip forward at least 5 seconds.\n" | 56 "During rendering, press spacebar to skip forward at least 5 seconds.\n" |
54 "To display this message: mft_h264_decoder --help"; | 57 "To display this message: mft_h264_decoder --help"; |
55 fprintf(stderr, "%s\n", usage_msg); | 58 fprintf(stderr, "%s\n", usage_msg); |
56 } | 59 } |
57 | 60 |
58 static bool InitFFmpeg() { | 61 static bool InitFFmpeg() { |
59 if (!media::InitializeMediaLibrary(FilePath())) | 62 if (!media::InitializeMediaLibrary(FilePath())) |
60 return false; | 63 return false; |
61 avcodec_init(); | 64 avcodec_init(); |
62 av_register_all(); | 65 av_register_all(); |
63 av_register_protocol2(&kFFmpegFileProtocol, sizeof(kFFmpegFileProtocol)); | 66 av_register_protocol2(&kFFmpegFileProtocol, sizeof(kFFmpegFileProtocol)); |
64 return true; | 67 return true; |
65 } | 68 } |
66 | 69 |
67 bool InitComLibrary() { | |
68 HRESULT hr; | |
69 hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE); | |
70 if (FAILED(hr)) { | |
71 LOG(ERROR) << "CoInit fail"; | |
72 return false; | |
73 } | |
74 return true; | |
75 } | |
76 | |
77 // Creates a window with the given width and height. | 70 // Creates a window with the given width and height. |
78 // Returns: A handle to the window on success, NULL otherwise. | 71 // Returns: A handle to the window on success, NULL otherwise. |
79 static HWND CreateDrawWindow(int width, int height) { | 72 static HWND CreateDrawWindow(int width, int height) { |
80 WNDCLASS window_class = {0}; | 73 WNDCLASS window_class = {0}; |
81 window_class.lpszClassName = kWindowClass; | 74 window_class.lpszClassName = kWindowClass; |
82 window_class.hInstance = NULL; | 75 window_class.hInstance = NULL; |
83 window_class.hbrBackground = 0; | 76 window_class.hbrBackground = 0; |
84 window_class.lpfnWndProc = DefWindowProc; | 77 window_class.lpfnWndProc = DefWindowProc; |
85 window_class.hCursor = 0; | 78 window_class.hCursor = 0; |
86 | 79 |
87 if (RegisterClass(&window_class) == 0) { | 80 if (RegisterClass(&window_class) == 0) { |
88 LOG(ERROR) << "Failed to register window class"; | 81 LOG(ERROR) << "Failed to register window class"; |
89 return false; | 82 return false; |
90 } | 83 } |
91 HWND window = CreateWindow(kWindowClass, | 84 HWND window = CreateWindow(kWindowClass, |
92 kWindowTitle, | 85 kWindowTitle, |
93 kWindowStyleFlags, | 86 kWindowStyleFlags, |
94 100, | 87 100, |
95 100, | 88 100, |
96 width, | 89 width, |
97 height, | 90 height, |
98 NULL, | 91 NULL, |
99 NULL, | 92 NULL, |
100 NULL, | 93 NULL, |
101 NULL); | 94 NULL); |
102 if (window == NULL) { | 95 if (window == NULL) { |
103 LOG(ERROR) << "Failed to create window"; | 96 LOG(ERROR) << "Failed to create window"; |
104 return NULL; | 97 return NULL; |
105 } | 98 } |
| 99 RECT rect; |
| 100 rect.left = 0; |
| 101 rect.right = width; |
| 102 rect.top = 0; |
| 103 rect.bottom = height; |
| 104 AdjustWindowRect(&rect, kWindowStyleFlags, FALSE); |
| 105 MoveWindow(window, 0, 0, rect.right - rect.left, rect.bottom - rect.top, |
| 106 TRUE); |
106 return window; | 107 return window; |
107 } | 108 } |
108 | 109 |
109 class WindowObserver : public base::MessagePumpWin::Observer { | 110 class WindowObserver : public base::MessagePumpWin::Observer { |
110 public: | 111 public: |
111 WindowObserver(FFmpegFileReader* reader, MftH264Decoder* decoder) | 112 WindowObserver(FFmpegFileReader* reader, MftH264Decoder* decoder) |
112 : reader_(reader), | 113 : reader_(reader), |
113 decoder_(decoder) { | 114 decoder_(decoder) { |
114 } | 115 } |
115 | 116 |
116 virtual void WillProcessMessage(const MSG& msg) { | 117 virtual void WillProcessMessage(const MSG& msg) { |
117 if (msg.message == WM_CHAR && msg.wParam == ' ') { | 118 if (msg.message == WM_CHAR && msg.wParam == ' ') { |
118 if (!decoder_->Flush()) { | |
119 LOG(ERROR) << "Flush failed"; | |
120 } | |
121 // Seek forward 5 seconds. | 119 // Seek forward 5 seconds. |
| 120 decoder_->Flush(); |
122 reader_->SeekForward(5000000); | 121 reader_->SeekForward(5000000); |
123 } | 122 } |
124 } | 123 } |
125 | 124 |
126 virtual void DidProcessMessage(const MSG& msg) { | 125 virtual void DidProcessMessage(const MSG& msg) { |
127 } | 126 } |
128 | 127 |
129 private: | 128 private: |
130 FFmpegFileReader* reader_; | 129 FFmpegFileReader* reader_; |
131 MftH264Decoder* decoder_; | 130 MftH264Decoder* decoder_; |
132 }; | 131 }; |
133 | 132 |
134 static int Run(bool use_dxva, bool render, const std::string& input_file) { | 133 class MftH264DecoderHandler |
135 // If we are not rendering, we need a window anyway to create a D3D device, | 134 : public VideoDecodeEngine::EventHandler, |
136 // so we will just use the desktop window. (?) | 135 public base::RefCountedThreadSafe<MftH264DecoderHandler> { |
137 HWND window = GetDesktopWindow(); | 136 public: |
138 if (render) { | 137 MftH264DecoderHandler() : frames_read_(0), frames_decoded_(0) { |
139 window = CreateDrawWindow(640, 480); | 138 memset(&info_, 0, sizeof(info_)); |
140 if (window == NULL) { | 139 } |
141 LOG(ERROR) << "Failed to create window"; | 140 virtual ~MftH264DecoderHandler() {} |
142 return -1; | 141 virtual void OnInitializeComplete(const VideoCodecInfo& info) { |
| 142 info_ = info; |
| 143 } |
| 144 virtual void OnUninitializeComplete() { |
| 145 } |
| 146 virtual void OnFlushComplete() { |
| 147 } |
| 148 virtual void OnSeekComplete() {} |
| 149 virtual void OnError() {} |
| 150 virtual void OnFormatChange(VideoStreamInfo stream_info) { |
| 151 info_.stream_info_ = stream_info; |
| 152 } |
| 153 virtual void OnEmptyBufferCallback(scoped_refptr<Buffer> buffer) { |
| 154 if (reader_ && decoder_.get()) { |
| 155 scoped_refptr<DataBuffer> input; |
| 156 reader_->Read(&input); |
| 157 if (!input->IsEndOfStream()) |
| 158 frames_read_++; |
| 159 decoder_->EmptyThisBuffer(input); |
143 } | 160 } |
144 } | 161 } |
| 162 virtual void OnFillBufferCallback(scoped_refptr<VideoFrame> frame) { |
| 163 if (frame.get()) { |
| 164 if (frame->format() != VideoFrame::EMPTY) { |
| 165 frames_decoded_++; |
| 166 } |
| 167 } |
| 168 } |
| 169 virtual void SetReader(FFmpegFileReader* reader) { |
| 170 reader_ = reader; |
| 171 } |
| 172 virtual void SetDecoder(scoped_refptr<MftH264Decoder> decoder) { |
| 173 decoder_ = decoder; |
| 174 } |
| 175 virtual void DecodeSingleFrame() { |
| 176 scoped_refptr<VideoFrame> frame; |
| 177 decoder_->FillThisBuffer(frame); |
| 178 } |
| 179 virtual void Start() { |
| 180 while (decoder_->state() != MftH264Decoder::kStopped) |
| 181 DecodeSingleFrame(); |
| 182 } |
| 183 |
| 184 VideoCodecInfo info_; |
| 185 int frames_read_; |
| 186 int frames_decoded_; |
| 187 FFmpegFileReader* reader_; |
| 188 scoped_refptr<MftH264Decoder> decoder_; |
| 189 }; |
| 190 |
| 191 class RenderToWindowHandler : public MftH264DecoderHandler { |
| 192 public: |
| 193 RenderToWindowHandler(HWND window, MessageLoop* loop) |
| 194 : MftH264DecoderHandler(), |
| 195 window_(window), |
| 196 loop_(loop), |
| 197 has_output_(false) { |
| 198 } |
| 199 virtual ~RenderToWindowHandler() {} |
| 200 virtual void OnFillBufferCallback(scoped_refptr<VideoFrame> frame) { |
| 201 has_output_ = true; |
| 202 if (frame.get()) { |
| 203 if (frame->format() != VideoFrame::EMPTY) { |
| 204 frames_decoded_++; |
| 205 loop_->PostDelayedTask( |
| 206 FROM_HERE, |
| 207 NewRunnableMethod(this, &RenderToWindowHandler::DecodeSingleFrame), |
| 208 frame->GetDuration().InMilliseconds()); |
| 209 |
| 210 int width = frame->width(); |
| 211 int height = frame->height(); |
| 212 |
| 213 // Assume height does not change. |
| 214 static uint8* rgb_frame = new uint8[height * frame->stride(0) * 4]; |
| 215 uint8* frame_y = static_cast<uint8*>(frame->data(VideoFrame::kYPlane)); |
| 216 uint8* frame_u = static_cast<uint8*>(frame->data(VideoFrame::kUPlane)); |
| 217 uint8* frame_v = static_cast<uint8*>(frame->data(VideoFrame::kVPlane)); |
| 218 media::ConvertYUVToRGB32(frame_y, frame_v, frame_u, rgb_frame, |
| 219 width, (height + 15) & ~15, |
| 220 frame->stride(0), frame->stride(1), |
| 221 4 * frame->stride(0), media::YV12); |
| 222 PAINTSTRUCT ps; |
| 223 InvalidateRect(window_, NULL, TRUE); |
| 224 HDC hdc = BeginPaint(window_, &ps); |
| 225 BITMAPINFOHEADER hdr; |
| 226 hdr.biSize = sizeof(BITMAPINFOHEADER); |
| 227 hdr.biWidth = width; |
| 228 hdr.biHeight = -height; // minus means top-down bitmap |
| 229 hdr.biPlanes = 1; |
| 230 hdr.biBitCount = 32; |
| 231 hdr.biCompression = BI_RGB; // no compression |
| 232 hdr.biSizeImage = 0; |
| 233 hdr.biXPelsPerMeter = 1; |
| 234 hdr.biYPelsPerMeter = 1; |
| 235 hdr.biClrUsed = 0; |
| 236 hdr.biClrImportant = 0; |
| 237 int rv = StretchDIBits(hdc, 0, 0, width, height, 0, 0, width, height, |
| 238 rgb_frame, reinterpret_cast<BITMAPINFO*>(&hdr), |
| 239 DIB_RGB_COLORS, SRCCOPY); |
| 240 EndPaint(window_, &ps); |
| 241 if (!rv) { |
| 242 LOG(ERROR) << "StretchDIBits failed"; |
| 243 loop_->QuitNow(); |
| 244 } |
| 245 } else { // if frame is type EMPTY, there will be no more frames. |
| 246 loop_->QuitNow(); |
| 247 } |
| 248 } |
| 249 } |
| 250 virtual void DecodeSingleFrame() { |
| 251 if (decoder_->state() != MftH264Decoder::kStopped) { |
| 252 while (decoder_->state() != MftH264Decoder::kStopped && !has_output_) { |
| 253 scoped_refptr<VideoFrame> frame; |
| 254 decoder_->FillThisBuffer(frame); |
| 255 } |
| 256 if (decoder_->state() == MftH264Decoder::kStopped) |
| 257 loop_->QuitNow(); |
| 258 has_output_ = false; |
| 259 } else { |
| 260 loop_->QuitNow(); |
| 261 } |
| 262 } |
| 263 virtual void Start() { |
| 264 loop_->PostTask( |
| 265 FROM_HERE, |
| 266 NewRunnableMethod(this, &RenderToWindowHandler::DecodeSingleFrame)); |
| 267 loop_->Run(); |
| 268 } |
| 269 |
| 270 private: |
| 271 HWND window_; |
| 272 MessageLoop* loop_; |
| 273 bool has_output_; |
| 274 }; |
| 275 |
| 276 static int Run(bool use_dxva, bool render, const std::string& input_file) { |
145 scoped_ptr<FFmpegFileReader> reader(new FFmpegFileReader(input_file)); | 277 scoped_ptr<FFmpegFileReader> reader(new FFmpegFileReader(input_file)); |
146 if (reader.get() == NULL || !reader->Initialize()) { | 278 if (reader.get() == NULL || !reader->Initialize()) { |
147 LOG(ERROR) << "Failed to create/initialize reader"; | 279 LOG(ERROR) << "Failed to create/initialize reader"; |
148 return -1; | 280 return -1; |
149 } | 281 } |
150 int width = 0, height = 0; | 282 int width = 0, height = 0; |
151 if (!reader->GetWidth(&width) || !reader->GetHeight(&height)) { | 283 if (!reader->GetWidth(&width) || !reader->GetHeight(&height)) { |
152 LOG(WARNING) << "Failed to get width/height from reader"; | 284 LOG(WARNING) << "Failed to get width/height from reader"; |
153 } | 285 } |
154 int aspect_ratio_num = 0, aspect_ratio_denom = 0; | 286 VideoCodecConfig config; |
155 if (!reader->GetAspectRatio(&aspect_ratio_num, &aspect_ratio_denom)) { | 287 config.width_ = width; |
156 LOG(WARNING) << "Failed to get aspect ratio"; | 288 config.height_ = height; |
157 } | 289 HWND window = NULL; |
158 int frame_rate_num = 0, frame_rate_denom = 0; | 290 if (render) { |
159 if (!reader->GetFrameRate(&frame_rate_num, &frame_rate_denom)) { | 291 window = CreateDrawWindow(width, height); |
160 LOG(WARNING) << "Failed to get frame rate"; | 292 if (window == NULL) { |
161 } | 293 LOG(ERROR) << "Failed to create window"; |
162 ScopedComPtr<IDirect3D9> d3d9; | |
163 ScopedComPtr<IDirect3DDevice9> device; | |
164 ScopedComPtr<IDirect3DDeviceManager9> dev_manager; | |
165 if (use_dxva) { | |
166 dev_manager.Attach(media::CreateD3DDevManager(window, | |
167 d3d9.Receive(), | |
168 device.Receive())); | |
169 if (dev_manager.get() == NULL) { | |
170 LOG(ERROR) << "Cannot create D3D9 manager"; | |
171 return -1; | 294 return -1; |
172 } | 295 } |
173 } | 296 } |
| 297 |
174 scoped_refptr<MftH264Decoder> mft(new MftH264Decoder(use_dxva)); | 298 scoped_refptr<MftH264Decoder> mft(new MftH264Decoder(use_dxva)); |
175 scoped_refptr<MftRenderer> renderer; | 299 if (!mft.get()) { |
176 if (render) { | 300 LOG(ERROR) << "Failed to create fake MFT"; |
177 renderer = new BasicRenderer(mft.get(), window, device); | |
178 } else { | |
179 renderer = new NullRenderer(mft.get()); | |
180 } | |
181 if (mft.get() == NULL) { | |
182 LOG(ERROR) << "Failed to create fake renderer / MFT"; | |
183 return -1; | 301 return -1; |
184 } | 302 } |
185 if (!mft->Init(dev_manager, | 303 |
186 frame_rate_num, frame_rate_denom, | 304 scoped_refptr<MftH264DecoderHandler> handler; |
187 width, height, | 305 if (render) |
188 aspect_ratio_num, aspect_ratio_denom, | 306 handler = new RenderToWindowHandler(window, MessageLoop::current()); |
189 NewCallback(reader.get(), &FFmpegFileReader::Read), | 307 else |
190 NewCallback(renderer.get(), &MftRenderer::ProcessFrame), | 308 handler = new MftH264DecoderHandler(); |
191 NewCallback(renderer.get(), | 309 handler->SetDecoder(mft); |
192 &MftRenderer::OnDecodeError))) { | 310 handler->SetReader(reader.get()); |
193 LOG(ERROR) << "Failed to initialize mft"; | 311 if (!handler.get()) { |
| 312 LOG(ERROR) << "Failed to create handler"; |
194 return -1; | 313 return -1; |
195 } | 314 } |
| 315 |
| 316 mft->Initialize(MessageLoop::current(), handler.get(), config); |
196 scoped_ptr<WindowObserver> observer; | 317 scoped_ptr<WindowObserver> observer; |
197 // If rendering, resize the window to fit the video frames. | 318 // If rendering, resize the window to fit the video frames. |
198 if (render) { | 319 if (render) { |
199 RECT rect; | |
200 rect.left = 0; | |
201 rect.right = mft->width(); | |
202 rect.top = 0; | |
203 rect.bottom = mft->height(); | |
204 AdjustWindowRect(&rect, kWindowStyleFlags, FALSE); | |
205 if (!MoveWindow(window, 0, 0, rect.right - rect.left, | |
206 rect.bottom - rect.top, TRUE)) { | |
207 LOG(WARNING) << "Warning: Failed to resize window"; | |
208 } | |
209 observer.reset(new WindowObserver(reader.get(), mft.get())); | 320 observer.reset(new WindowObserver(reader.get(), mft.get())); |
210 MessageLoopForUI::current()->AddObserver(observer.get()); | 321 MessageLoopForUI::current()->AddObserver(observer.get()); |
211 } | 322 } |
212 if (use_dxva) { | 323 |
213 // Reset the device's back buffer dimensions to match the window's | |
214 // dimensions. | |
215 if (!media::AdjustD3DDeviceBackBufferDimensions(device.get(), | |
216 window, | |
217 mft->width(), | |
218 mft->height())) { | |
219 LOG(WARNING) << "Warning: Failed to reset device to have correct " | |
220 << "backbuffer dimension, scaling might occur"; | |
221 } | |
222 } | |
223 Time decode_start(Time::Now()); | 324 Time decode_start(Time::Now()); |
224 | 325 handler->Start(); |
225 MessageLoopForUI::current()->PostTask(FROM_HERE, | |
226 NewRunnableMethod(renderer.get(), &MftRenderer::StartPlayback)); | |
227 MessageLoopForUI::current()->Run(NULL); | |
228 | |
229 TimeDelta decode_time = Time::Now() - decode_start; | 326 TimeDelta decode_time = Time::Now() - decode_start; |
230 | 327 |
231 printf("All done, frames read: %d, frames decoded: %d\n", | 328 printf("All done, frames read: %d, frames decoded: %d\n", |
232 mft->frames_read(), mft->frames_decoded()); | 329 handler->frames_read_, handler->frames_decoded_); |
233 printf("Took %lldms\n", decode_time.InMilliseconds()); | 330 printf("Took %lldms\n", decode_time.InMilliseconds()); |
| 331 if (window) |
| 332 DestroyWindow(window); |
234 return 0; | 333 return 0; |
235 } | 334 } |
236 | 335 |
237 } // namespace | 336 } // namespace |
238 | 337 |
239 int main(int argc, char** argv) { | 338 int main(int argc, char** argv) { |
240 AtExitManager at_exit; | 339 AtExitManager at_exit; |
241 MessageLoopForUI message_loop; | 340 MessageLoopForUI message_loop; |
242 CommandLine::Init(argc, argv); | 341 CommandLine::Init(argc, argv); |
243 if (argc == 1) { | 342 if (argc == 1) { |
(...skipping 15 matching lines...) |
259 return -1; | 358 return -1; |
260 } | 359 } |
261 printf("enable-dxva: %d\n", use_dxva); | 360 printf("enable-dxva: %d\n", use_dxva); |
262 printf("render: %d\n", render); | 361 printf("render: %d\n", render); |
263 printf("input-file: %s\n", input_file.c_str()); | 362 printf("input-file: %s\n", input_file.c_str()); |
264 | 363 |
265 if (!InitFFmpeg()) { | 364 if (!InitFFmpeg()) { |
266 LOG(ERROR) << "InitFFmpeg() failed"; | 365 LOG(ERROR) << "InitFFmpeg() failed"; |
267 return -1; | 366 return -1; |
268 } | 367 } |
269 if (!InitComLibrary()) { | |
270 LOG(ERROR) << "InitComLibraries() failed"; | |
271 return -1; | |
272 } | |
273 int ret = Run(use_dxva, render, input_file); | 368 int ret = Run(use_dxva, render, input_file); |
274 | 369 |
275 printf("Done\n"); | 370 printf("Done\n"); |
276 return ret; | 371 return ret; |
277 } | 372 } |