OLD | NEW |
| (Empty) |
1 // Copyright (c) 2010 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "media/mf/basic_renderer.h" | |
6 | |
7 #include <d3d9.h> | |
8 #include <mfapi.h> | |
9 #include <mfidl.h> | |
10 | |
11 #include "base/message_loop.h" | |
12 #include "base/scoped_comptr_win.h" | |
13 #include "media/base/yuv_convert.h" | |
14 | |
15 // For MFGetService and MF_BUFFER_SERVICE (getting D3D surface from buffer) | |
16 #pragma comment(lib, "mf.lib") | |
17 #pragma comment(lib, "strmiids.lib") | |
18 | |
19 namespace media { | |
20 | |
21 // Converts the given raw data buffer into RGB32 format, and drawing the result | |
22 // into the given window. This is only used when DXVA2 is not enabled. | |
23 // Returns: true on success. | |
24 bool ConvertToRGBAndDrawToWindow(HWND video_window, uint8* data, int width, | |
25 int height, int stride) { | |
26 CHECK(video_window != NULL); | |
27 CHECK(data != NULL); | |
28 CHECK_GT(width, 0); | |
29 CHECK_GT(height, 0); | |
30 CHECK_GE(stride, width); | |
31 height = (height + 15) & ~15; | |
32 bool success = true; | |
33 uint8* y_start = reinterpret_cast<uint8*>(data); | |
34 uint8* u_start = y_start + height * stride * 5 / 4; | |
35 uint8* v_start = y_start + height * stride; | |
36 static uint8* rgb_frame = new uint8[height * stride * 4]; | |
37 int y_stride = stride; | |
38 int uv_stride = stride / 2; | |
39 int rgb_stride = stride * 4; | |
40 ConvertYUVToRGB32(y_start, u_start, v_start, rgb_frame, | |
41 width, height, y_stride, uv_stride, | |
42 rgb_stride, YV12); | |
43 PAINTSTRUCT ps; | |
44 InvalidateRect(video_window, NULL, TRUE); | |
45 HDC hdc = BeginPaint(video_window, &ps); | |
46 BITMAPINFOHEADER hdr; | |
47 hdr.biSize = sizeof(BITMAPINFOHEADER); | |
48 hdr.biWidth = width; | |
49 hdr.biHeight = -height; // minus means top-down bitmap | |
50 hdr.biPlanes = 1; | |
51 hdr.biBitCount = 32; | |
52 hdr.biCompression = BI_RGB; // no compression | |
53 hdr.biSizeImage = 0; | |
54 hdr.biXPelsPerMeter = 1; | |
55 hdr.biYPelsPerMeter = 1; | |
56 hdr.biClrUsed = 0; | |
57 hdr.biClrImportant = 0; | |
58 int rv = StretchDIBits(hdc, 0, 0, width, height, 0, 0, width, height, | |
59 rgb_frame, reinterpret_cast<BITMAPINFO*>(&hdr), | |
60 DIB_RGB_COLORS, SRCCOPY); | |
61 if (rv == 0) { | |
62 LOG(ERROR) << "StretchDIBits failed"; | |
63 MessageLoopForUI::current()->QuitNow(); | |
64 success = false; | |
65 } | |
66 EndPaint(video_window, &ps); | |
67 | |
68 return success; | |
69 } | |
70 | |
71 // Obtains the underlying raw data buffer for the given IMFMediaBuffer, and | |
72 // calls ConvertToRGBAndDrawToWindow() with it. | |
73 // Returns: true on success. | |
74 bool PaintMediaBufferOntoWindow(HWND video_window, IMFMediaBuffer* video_buffer, | |
75 int width, int height, int stride) { | |
76 CHECK(video_buffer != NULL); | |
77 HRESULT hr; | |
78 BYTE* data; | |
79 DWORD buffer_length; | |
80 DWORD data_length; | |
81 hr = video_buffer->Lock(&data, &buffer_length, &data_length); | |
82 if (FAILED(hr)) { | |
83 LOG(ERROR) << "Failed to lock IMFMediaBuffer"; | |
84 return false; | |
85 } | |
86 if (!ConvertToRGBAndDrawToWindow(video_window, | |
87 reinterpret_cast<uint8*>(data), | |
88 width, | |
89 height, | |
90 stride)) { | |
91 LOG(ERROR) << "Failed to convert raw buffer to RGB and draw to window"; | |
92 video_buffer->Unlock(); | |
93 return false; | |
94 } | |
95 video_buffer->Unlock(); | |
96 return true; | |
97 } | |
98 | |
99 // Obtains the D3D9 surface from the given IMFMediaBuffer, then calls methods | |
100 // in the D3D device to draw to the window associated with it. | |
101 // Returns: true on success. | |
// Obtains the D3D9 surface from the given IMFMediaBuffer, then calls methods
// in the D3D device to draw to the window associated with it.
// Returns: true on success.
bool PaintD3D9BufferOntoWindow(IDirect3DDevice9* device,
                               IMFMediaBuffer* video_buffer) {
  CHECK(device != NULL);
  // Ask Media Foundation's buffer service for the D3D9 surface backing this
  // buffer (only available on the DXVA2 path).
  ScopedComPtr<IDirect3DSurface9> surface;
  HRESULT hr = MFGetService(video_buffer, MR_BUFFER_SERVICE,
                            IID_PPV_ARGS(surface.Receive()));
  if (FAILED(hr)) {
    LOG(ERROR) << "Failed to get D3D9 surface from buffer";
    return false;
  }
  // Clear the render target to black before drawing the frame.
  hr = device->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0),
                     1.0f, 0);
  if (FAILED(hr)) {
    LOG(ERROR) << "Device->Clear() failed";
    return false;
  }
  ScopedComPtr<IDirect3DSurface9> backbuffer;
  hr = device->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO,
                             backbuffer.Receive());
  if (FAILED(hr)) {
    LOG(ERROR) << "Device->GetBackBuffer() failed";
    return false;
  }
  // Copy (and scale, if needed) the decoded surface onto the backbuffer.
  hr = device->StretchRect(surface.get(), NULL, backbuffer.get(), NULL,
                           D3DTEXF_NONE);
  if (FAILED(hr)) {
    LOG(ERROR) << "Device->StretchRect() failed";
    return false;
  }
  hr = device->Present(NULL, NULL, NULL, NULL);
  if (FAILED(hr)) {
    if (hr == E_FAIL) {
      // E_FAIL from Present() is treated as non-fatal here: log a warning
      // and report success so playback continues.
      LOG(WARNING) << "Present() returned E_FAIL";
    } else {
      // Count other Present() failures over the process lifetime (the
      // counter is function-static and is never reset on success); bail out
      // of the UI loop after 10 of them.
      static int frames_dropped = 0;
      LOG(ERROR) << "Device->Present() failed "
                 << std::hex << std::showbase << hr;
      if (++frames_dropped == 10) {
        LOG(ERROR) << "Dropped too many frames, quitting";
        MessageLoopForUI::current()->QuitNow();
        return false;
      }
    }
  }
  return true;
}
148 | |
149 static void ReleaseOutputBuffer(VideoFrame* frame) { | |
150 if (frame != NULL && | |
151 frame->type() == VideoFrame::TYPE_MFBUFFER || | |
152 frame->type() == VideoFrame::TYPE_DIRECT3DSURFACE) { | |
153 static_cast<IMFMediaBuffer*>(frame->private_buffer())->Release(); | |
154 } | |
155 } | |
156 | |
157 // NullRenderer | |
158 | |
// Trivial constructor/destructor: the MftRenderer base class holds the
// decoder reference; NullRenderer adds no state of its own.
NullRenderer::NullRenderer(MftH264Decoder* decoder) : MftRenderer(decoder) {}
NullRenderer::~NullRenderer() {}
161 | |
162 void NullRenderer::ProcessFrame(scoped_refptr<VideoFrame> frame) { | |
163 ReleaseOutputBuffer(frame); | |
164 MessageLoop::current()->PostTask( | |
165 FROM_HERE, NewRunnableMethod(decoder_.get(), | |
166 &MftH264Decoder::GetOutput)); | |
167 } | |
168 | |
169 void NullRenderer::StartPlayback() { | |
170 MessageLoop::current()->PostTask( | |
171 FROM_HERE, NewRunnableMethod(decoder_.get(), | |
172 &MftH264Decoder::GetOutput)); | |
173 } | |
174 | |
// Any decode error stops the message loop; |error| is not inspected further.
void NullRenderer::OnDecodeError(MftH264Decoder::Error error) {
  MessageLoop::current()->Quit();
}
178 | |
179 // BasicRenderer | |
180 | |
// |window| is the HWND frames are painted into. |device| selects the render
// path in ProcessFrame(): non-NULL uses D3D9 presentation, NULL falls back to
// software YUV->RGB conversion and GDI blitting.
BasicRenderer::BasicRenderer(MftH264Decoder* decoder,
                             HWND window, IDirect3DDevice9* device)
    : MftRenderer(decoder),
      window_(window),
      device_(device) {
}

BasicRenderer::~BasicRenderer() {}
189 | |
// Paints the decoded frame into the window -- via D3D9 when a device was
// provided, otherwise via software conversion -- then releases the frame's
// output buffer. Also schedules the next GetOutput() call, delayed by this
// frame's duration, to pace playback.
void BasicRenderer::ProcessFrame(scoped_refptr<VideoFrame> frame) {
  // Schedule the next frame request up front so pacing is independent of how
  // long painting takes below.
  MessageLoopForUI::current()->PostDelayedTask(
      FROM_HERE, NewRunnableMethod(decoder_.get(),
                                   &MftH264Decoder::GetOutput),
      frame->GetDuration().InMilliseconds());
  if (device_ != NULL) {
    // DXVA2 path: the frame's private buffer wraps a D3D9 surface.
    if (!PaintD3D9BufferOntoWindow(device_,
        static_cast<IMFMediaBuffer*>(frame->private_buffer()))) {
      MessageLoopForUI::current()->QuitNow();
    }
  } else {
    // Software path: lock the media buffer and convert/draw its raw data.
    if (!PaintMediaBufferOntoWindow(
        window_, static_cast<IMFMediaBuffer*>(frame->private_buffer()),
        frame->width(), frame->height(), frame->stride(0))) {
      MessageLoopForUI::current()->QuitNow();
    }
  }
  // Release the underlying buffer on both the success and the failure path.
  ReleaseOutputBuffer(frame);
}
209 | |
210 void BasicRenderer::StartPlayback() { | |
211 MessageLoopForUI::current()->PostTask( | |
212 FROM_HERE, NewRunnableMethod(decoder_.get(), | |
213 &MftH264Decoder::GetOutput)); | |
214 } | |
215 | |
// Any decode error stops the UI message loop; |error| is not inspected
// further.
void BasicRenderer::OnDecodeError(MftH264Decoder::Error error) {
  MessageLoopForUI::current()->Quit();
}
219 | |
220 } // namespace media | |
OLD | NEW |