// Copyright 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "config.h"

#if USE(ACCELERATED_COMPOSITING)

#include "CCVideoLayerImpl.h"

#include "CCIOSurfaceDrawQuad.h"
#include "CCLayerTreeHostImpl.h"
#include "CCProxy.h"
#include "CCQuadSink.h"
#include "CCResourceProvider.h"
#include "CCStreamVideoDrawQuad.h"
#include "CCTextureDrawQuad.h"
#include "CCYUVVideoDrawQuad.h"
#include "Extensions3DChromium.h"
#include "GraphicsContext3D.h"
#include "NotImplemented.h"
#include <public/WebVideoFrame.h>

namespace cc {

CCVideoLayerImpl::CCVideoLayerImpl(int id, WebKit::WebVideoFrameProvider* provider)
    : CCLayerImpl(id)
    , m_provider(provider)
    , m_frame(0)
    , m_externalTextureResource(0)
{
    // This matrix is the default transformation for stream textures, and flips on the Y axis.
    m_streamTextureMatrix = WebKit::WebTransformationMatrix(
        1, 0, 0, 0,
        0, -1, 0, 0,
        0, 0, 1, 0,
        0, 1, 0, 1);

    // This only happens during a commit on the compositor thread while the main
    // thread is blocked. That makes this a thread-safe call to set the video
    // frame provider client that does not require a lock. The same is true of
    // the call in the destructor.
    ASSERT(CCProxy::isMainThreadBlocked());
    m_provider->setVideoFrameProviderClient(this);
}

CCVideoLayerImpl::~CCVideoLayerImpl()
{
    // See comment in constructor for why this doesn't need a lock.
    ASSERT(CCProxy::isMainThreadBlocked());
    if (m_provider) {
        m_provider->setVideoFrameProviderClient(0);
        m_provider = 0;
    }
    freePlaneData(layerTreeHostImpl()->resourceProvider());

#if !ASSERT_DISABLED
    for (unsigned i = 0; i < WebKit::WebVideoFrame::maxPlanes; ++i)
        ASSERT(!m_framePlanes[i].resourceId);
    ASSERT(!m_externalTextureResource);
#endif
}

void CCVideoLayerImpl::stopUsingProvider()
{
    // Block the provider from shutting down until this client is done
    // using the frame.
    MutexLocker locker(m_providerMutex);
    ASSERT(!m_frame);
    m_provider = 0;
}

// Convert WebKit::WebVideoFrame::Format to GraphicsContext3D's format enum values.
static GC3Denum convertVFCFormatToGC3DFormat(const WebKit::WebVideoFrame& frame)
{
    switch (frame.format()) {
    case WebKit::WebVideoFrame::FormatYV12:
    case WebKit::WebVideoFrame::FormatYV16:
        return GraphicsContext3D::LUMINANCE;
    case WebKit::WebVideoFrame::FormatNativeTexture:
        return frame.textureTarget();
    case WebKit::WebVideoFrame::FormatInvalid:
    case WebKit::WebVideoFrame::FormatRGB32:
    case WebKit::WebVideoFrame::FormatEmpty:
    case WebKit::WebVideoFrame::FormatI420:
        notImplemented();
    }
    return GraphicsContext3D::INVALID_VALUE;
}

void CCVideoLayerImpl::willDraw(CCResourceProvider* resourceProvider)
{
    ASSERT(CCProxy::isImplThread());
    CCLayerImpl::willDraw(resourceProvider);

    // Explicitly lock and unlock the provider mutex so it can be held from
    // willDraw to didDraw. Since the compositor thread is in the middle of
    // drawing, the layer will not be destroyed before didDraw is called.
    // Therefore, the only thing that will prevent this lock from being released
    // is the GPU process locking it. As the GPU process can't cause the
    // destruction of the provider (calling stopUsingProvider), holding this
    // lock should not cause a deadlock.
    m_providerMutex.lock();

    willDrawInternal(resourceProvider);
    freeUnusedPlaneData(resourceProvider);

    if (!m_frame)
        m_providerMutex.unlock();
}

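// Acquires the provider's current frame and prepares the resources needed to
// draw it. If the frame cannot be used (unsupported format, too many planes,
// or a failed plane allocation/upload), it is returned to the provider and
// m_frame is reset to 0.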
void CCVideoLayerImpl::willDrawInternal(CCResourceProvider* resourceProvider)
{
    ASSERT(CCProxy::isImplThread());
    ASSERT(!m_externalTextureResource);

    if (!m_provider) {
        m_frame = 0;
        return;
    }

    m_frame = m_provider->getCurrentFrame();

    if (!m_frame)
        return;

    m_format = convertVFCFormatToGC3DFormat(*m_frame);

    if (m_format == GraphicsContext3D::INVALID_VALUE) {
        m_provider->putCurrentFrame(m_frame);
        m_frame = 0;
        return;
    }

    if (m_frame->planes() > WebKit::WebVideoFrame::maxPlanes) {
        m_provider->putCurrentFrame(m_frame);
        m_frame = 0;
        return;
    }

    if (!allocatePlaneData(resourceProvider)) {
        m_provider->putCurrentFrame(m_frame);
        m_frame = 0;
        return;
    }

    if (!copyPlaneData(resourceProvider)) {
        m_provider->putCurrentFrame(m_frame);
        m_frame = 0;
        return;
    }

    if (m_format == GraphicsContext3D::TEXTURE_2D)
        m_externalTextureResource = resourceProvider->createResourceFromExternalTexture(m_frame->textureId());
}

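// Emits a draw quad for the current frame; the quad type depends on the format
// chosen in willDrawInternal (YUV planes, RGBA texture, external 2D texture,
// IOSurface, or stream texture).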
void CCVideoLayerImpl::appendQuads(CCQuadSink& quadSink, CCAppendQuadsData& appendQuadsData)
{
    ASSERT(CCProxy::isImplThread());

    if (!m_frame)
        return;

    CCSharedQuadState* sharedQuadState = quadSink.useSharedQuadState(createSharedQuadState());
    appendDebugBorderQuad(quadSink, sharedQuadState, appendQuadsData);

    // FIXME: When we pass quads out of process, we need to double-buffer, or
    // otherwise synchronize use of all textures in the quad.

    IntRect quadRect(IntPoint(), contentBounds());

    switch (m_format) {
    case GraphicsContext3D::LUMINANCE: {
        // YUV software decoder.
        const FramePlane& yPlane = m_framePlanes[WebKit::WebVideoFrame::yPlane];
        const FramePlane& uPlane = m_framePlanes[WebKit::WebVideoFrame::uPlane];
        const FramePlane& vPlane = m_framePlanes[WebKit::WebVideoFrame::vPlane];
        scoped_ptr<CCYUVVideoDrawQuad> yuvVideoQuad = CCYUVVideoDrawQuad::create(sharedQuadState, quadRect, yPlane, uPlane, vPlane);
        quadSink.append(yuvVideoQuad.PassAs<CCDrawQuad>(), appendQuadsData);
        break;
    }
    case GraphicsContext3D::RGBA: {
        // RGBA software decoder.
        const FramePlane& plane = m_framePlanes[WebKit::WebVideoFrame::rgbPlane];
        float widthScaleFactor = static_cast<float>(plane.visibleSize.width()) / plane.size.width();

        bool premultipliedAlpha = true;
        FloatRect uvRect(0, 0, widthScaleFactor, 1);
        bool flipped = false;
        scoped_ptr<CCTextureDrawQuad> textureQuad = CCTextureDrawQuad::create(sharedQuadState, quadRect, plane.resourceId, premultipliedAlpha, uvRect, flipped);
        quadSink.append(textureQuad.PassAs<CCDrawQuad>(), appendQuadsData);
        break;
    }
    case GraphicsContext3D::TEXTURE_2D: {
        // NativeTexture hardware decoder.
        bool premultipliedAlpha = true;
        FloatRect uvRect(0, 0, 1, 1);
        bool flipped = false;
        scoped_ptr<CCTextureDrawQuad> textureQuad = CCTextureDrawQuad::create(sharedQuadState, quadRect, m_externalTextureResource, premultipliedAlpha, uvRect, flipped);
        quadSink.append(textureQuad.PassAs<CCDrawQuad>(), appendQuadsData);
        break;
    }
    case Extensions3D::TEXTURE_RECTANGLE_ARB: {
        IntSize textureSize(m_frame->width(), m_frame->height());
        scoped_ptr<CCIOSurfaceDrawQuad> ioSurfaceQuad = CCIOSurfaceDrawQuad::create(sharedQuadState, quadRect, textureSize, m_frame->textureId(), CCIOSurfaceDrawQuad::Unflipped);
        quadSink.append(ioSurfaceQuad.PassAs<CCDrawQuad>(), appendQuadsData);
        break;
    }
    case Extensions3DChromium::GL_TEXTURE_EXTERNAL_OES: {
        // StreamTexture hardware decoder.
        scoped_ptr<CCStreamVideoDrawQuad> streamVideoQuad = CCStreamVideoDrawQuad::create(sharedQuadState, quadRect, m_frame->textureId(), m_streamTextureMatrix);
        quadSink.append(streamVideoQuad.PassAs<CCDrawQuad>(), appendQuadsData);
        break;
    }
    default:
        CRASH(); // Someone updated convertVFCFormatToGC3DFormat above without updating this switch.
    }
}

void CCVideoLayerImpl::didDraw(CCResourceProvider* resourceProvider)
{
    ASSERT(CCProxy::isImplThread());
    CCLayerImpl::didDraw(resourceProvider);

    if (!m_frame)
        return;

    if (m_format == GraphicsContext3D::TEXTURE_2D) {
        ASSERT(m_externalTextureResource);
        // FIXME: the following assert will not be true when sending resources to a
        // parent compositor. We will probably need to hold on to m_frame for
        // longer, and have several "current frames" in the pipeline.
        ASSERT(!resourceProvider->inUseByConsumer(m_externalTextureResource));
        resourceProvider->deleteResource(m_externalTextureResource);
        m_externalTextureResource = 0;
    }

    m_provider->putCurrentFrame(m_frame);
    m_frame = 0;

    m_providerMutex.unlock();
}

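// Returns the dimension of the given plane. In YV12 the U and V planes are
// subsampled 2x2, so their dimensions are half those of the Y plane.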
static int videoFrameDimension(int originalDimension, unsigned plane, int format)
{
    if (format == WebKit::WebVideoFrame::FormatYV12 && plane != WebKit::WebVideoFrame::yPlane)
        return originalDimension / 2;
    return originalDimension;
}

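// True if the plane's stride is wider than its visible width, i.e. each row
// ends with padding bytes.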
static bool hasPaddingBytes(const WebKit::WebVideoFrame& frame, unsigned plane)
{
    return frame.stride(plane) > videoFrameDimension(frame.width(), plane, frame.format());
}

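// Computes the portion of the plane texture that should actually be sampled,
// trimming off padding and keeping the Y plane aligned with the subsampled
// U/V planes in YV12.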
IntSize CCVideoLayerImpl::computeVisibleSize(const WebKit::WebVideoFrame& frame, unsigned plane)
{
    int visibleWidth = videoFrameDimension(frame.width(), plane, frame.format());
    int originalWidth = visibleWidth;
    int visibleHeight = videoFrameDimension(frame.height(), plane, frame.format());

    // When there are dead pixels at the edge of the texture, decrease
    // the frame width by 1 to prevent the rightmost pixels from
    // interpolating with the dead pixels.
    if (hasPaddingBytes(frame, plane))
        --visibleWidth;

    // In YV12, every 2x2 square of Y values corresponds to one U and
    // one V value. If we decrease the width of the UV plane, we must decrease the
    // width of the Y texture by 2 for proper alignment. This must always happen,
    // even if the Y texture does not have padding bytes.
    if (plane == WebKit::WebVideoFrame::yPlane && frame.format() == WebKit::WebVideoFrame::FormatYV12) {
        if (hasPaddingBytes(frame, WebKit::WebVideoFrame::uPlane))
            visibleWidth = originalWidth - 2;
    }

    return IntSize(visibleWidth, visibleHeight);
}

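// Lazily creates the resource backing this plane; returns false if allocation
// fails.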
bool CCVideoLayerImpl::FramePlane::allocateData(CCResourceProvider* resourceProvider)
{
    if (resourceId)
        return true;

    resourceId = resourceProvider->createResource(CCRenderer::ImplPool, size, format, CCResourceProvider::TextureUsageAny);
    return resourceId;
}

void CCVideoLayerImpl::FramePlane::freeData(CCResourceProvider* resourceProvider)
{
    if (!resourceId)
        return;

    resourceProvider->deleteResource(resourceId);
    resourceId = 0;
}

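// Ensures each plane of the current frame is backed by a resource of the
// required size and format, reallocating when either has changed.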
bool CCVideoLayerImpl::allocatePlaneData(CCResourceProvider* resourceProvider)
{
    int maxTextureSize = resourceProvider->maxTextureSize();
    for (unsigned planeIndex = 0; planeIndex < m_frame->planes(); ++planeIndex) {
        CCVideoLayerImpl::FramePlane& plane = m_framePlanes[planeIndex];

        IntSize requiredTextureSize(m_frame->stride(planeIndex), videoFrameDimension(m_frame->height(), planeIndex, m_frame->format()));
        // FIXME: Remove the test against maxTextureSize when tiled layers are implemented.
        if (requiredTextureSize.isZero() || requiredTextureSize.width() > maxTextureSize || requiredTextureSize.height() > maxTextureSize)
            return false;

        if (plane.size != requiredTextureSize || plane.format != m_format) {
            plane.freeData(resourceProvider);
            plane.size = requiredTextureSize;
            plane.format = m_format;
        }

        if (!plane.resourceId) {
            if (!plane.allocateData(resourceProvider))
                return false;
            plane.visibleSize = computeVisibleSize(*m_frame, planeIndex);
        }
    }
    return true;
}

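// Uploads the software-decoded pixels of each plane into the plane's resource.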
bool CCVideoLayerImpl::copyPlaneData(CCResourceProvider* resourceProvider)
{
    size_t softwarePlaneCount = m_frame->planes();
    if (!softwarePlaneCount)
        return true;

    for (size_t softwarePlaneIndex = 0; softwarePlaneIndex < softwarePlaneCount; ++softwarePlaneIndex) {
        CCVideoLayerImpl::FramePlane& plane = m_framePlanes[softwarePlaneIndex];
        const uint8_t* softwarePlanePixels = static_cast<const uint8_t*>(m_frame->data(softwarePlaneIndex));
        IntRect planeRect(IntPoint(), plane.size);
        resourceProvider->upload(plane.resourceId, softwarePlanePixels, planeRect, planeRect, IntSize());
    }
    return true;
}

void CCVideoLayerImpl::freePlaneData(CCResourceProvider* resourceProvider)
{
    for (unsigned i = 0; i < WebKit::WebVideoFrame::maxPlanes; ++i)
        m_framePlanes[i].freeData(resourceProvider);
}

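// Releases the resources of any planes not used by the current frame.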
void CCVideoLayerImpl::freeUnusedPlaneData(CCResourceProvider* resourceProvider)
{
    unsigned firstUnusedPlane = m_frame ? m_frame->planes() : 0;
    for (unsigned i = firstUnusedPlane; i < WebKit::WebVideoFrame::maxPlanes; ++i)
        m_framePlanes[i].freeData(resourceProvider);
}

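// Called by the provider when a new frame is available; schedules a redraw so
// the frame is picked up on the next willDraw.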
void CCVideoLayerImpl::didReceiveFrame()
{
    setNeedsRedraw();
}

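// Called when the stream texture's transform changes; stores the new matrix
// and schedules a redraw.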
void CCVideoLayerImpl::didUpdateMatrix(const float matrix[16])
{
    m_streamTextureMatrix = WebKit::WebTransformationMatrix(
        matrix[0], matrix[1], matrix[2], matrix[3],
        matrix[4], matrix[5], matrix[6], matrix[7],
        matrix[8], matrix[9], matrix[10], matrix[11],
        matrix[12], matrix[13], matrix[14], matrix[15]);
    setNeedsRedraw();
}

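// Drops all plane resources on context loss; they are recreated on the next
// draw.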
void CCVideoLayerImpl::didLoseContext()
{
    freePlaneData(layerTreeHostImpl()->resourceProvider());
}

void CCVideoLayerImpl::setNeedsRedraw()
{
    layerTreeHostImpl()->setNeedsRedraw();
}

void CCVideoLayerImpl::dumpLayerProperties(std::string* str, int indent) const
{
    str->append(indentString(indent));
    str->append("video layer\n");
    CCLayerImpl::dumpLayerProperties(str, indent);
}

const char* CCVideoLayerImpl::layerTypeAsString() const
{
    return "VideoLayer";
}

}

#endif // USE(ACCELERATED_COMPOSITING)