OLD | NEW |
---|---|
1 // Copyright 2011 The Chromium Authors. All rights reserved. | 1 // Copyright 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "cc/video_layer_impl.h" | 5 #include "cc/video_layer_impl.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
8 #include "cc/io_surface_draw_quad.h" | 8 #include "cc/io_surface_draw_quad.h" |
9 #include "cc/layer_tree_impl.h" | 9 #include "cc/layer_tree_impl.h" |
10 #include "cc/math_util.h" | 10 #include "cc/math_util.h" |
11 #include "cc/quad_sink.h" | 11 #include "cc/quad_sink.h" |
12 #include "cc/renderer.h" | 12 #include "cc/renderer.h" |
13 #include "cc/resource_provider.h" | 13 #include "cc/resource_provider.h" |
14 #include "cc/stream_video_draw_quad.h" | 14 #include "cc/stream_video_draw_quad.h" |
15 #include "cc/texture_draw_quad.h" | 15 #include "cc/texture_draw_quad.h" |
16 #include "cc/video_frame_provider_client_impl.h" | 16 #include "cc/video_frame_provider_client_impl.h" |
17 #include "cc/yuv_video_draw_quad.h" | 17 #include "cc/yuv_video_draw_quad.h" |
18 #include "gpu/GLES2/gl2extchromium.h" | 18 #include "gpu/GLES2/gl2extchromium.h" |
19 #include "media/filters/skcanvas_video_renderer.h" | 19 #include "media/filters/skcanvas_video_renderer.h" |
20 #include "third_party/khronos/GLES2/gl2.h" | 20 #include "third_party/khronos/GLES2/gl2.h" |
21 #include "third_party/khronos/GLES2/gl2ext.h" | 21 #include "third_party/khronos/GLES2/gl2ext.h" |
22 | 22 |
23 #if defined(GOOGLE_TV) | |
24 #include "cc/solid_color_draw_quad.h" | |
25 #endif | |
26 | |
23 namespace cc { | 27 namespace cc { |
24 | 28 |
25 // static | 29 // static |
26 scoped_ptr<VideoLayerImpl> VideoLayerImpl::create(LayerTreeImpl* treeImpl, int id, VideoFrameProvider* provider) | 30 scoped_ptr<VideoLayerImpl> VideoLayerImpl::create(LayerTreeImpl* treeImpl, int id, VideoFrameProvider* provider) |
27 { | 31 { |
28 scoped_ptr<VideoLayerImpl> layer(new VideoLayerImpl(treeImpl, id)); | 32 scoped_ptr<VideoLayerImpl> layer(new VideoLayerImpl(treeImpl, id)); |
29 layer->setProviderClientImpl(VideoFrameProviderClientImpl::Create(provider)); | 33 layer->setProviderClientImpl(VideoFrameProviderClientImpl::Create(provider)); |
30 DCHECK(treeImpl->proxy()->isImplThread()); | 34 DCHECK(treeImpl->proxy()->isImplThread()); |
31 DCHECK(treeImpl->proxy()->isMainThreadBlocked()); | 35 DCHECK(treeImpl->proxy()->isMainThreadBlocked()); |
32 return layer.Pass(); | 36 return layer.Pass(); |
(...skipping 49 matching lines...) | |
82 | 86 |
83 // Convert media::VideoFrame::Format to OpenGL enum values. | 87 // Convert media::VideoFrame::Format to OpenGL enum values. |
84 static GLenum convertVFCFormatToGLenum(const media::VideoFrame& frame) | 88 static GLenum convertVFCFormatToGLenum(const media::VideoFrame& frame) |
85 { | 89 { |
86 switch (frame.format()) { | 90 switch (frame.format()) { |
87 case media::VideoFrame::YV12: | 91 case media::VideoFrame::YV12: |
88 case media::VideoFrame::YV16: | 92 case media::VideoFrame::YV16: |
89 return GL_LUMINANCE; | 93 return GL_LUMINANCE; |
90 case media::VideoFrame::NATIVE_TEXTURE: | 94 case media::VideoFrame::NATIVE_TEXTURE: |
91 return frame.texture_target(); | 95 return frame.texture_target(); |
96 #if defined(GOOGLE_TV) | |
97 case media::VideoFrame::HOLE: | |
98 return GL_INVALID_VALUE; | |
99 #endif | |
92 case media::VideoFrame::INVALID: | 100 case media::VideoFrame::INVALID: |
93 case media::VideoFrame::RGB32: | 101 case media::VideoFrame::RGB32: |
94 case media::VideoFrame::EMPTY: | 102 case media::VideoFrame::EMPTY: |
95 case media::VideoFrame::I420: | 103 case media::VideoFrame::I420: |
96 NOTREACHED(); | 104 NOTREACHED(); |
97 break; | 105 break; |
98 } | 106 } |
99 return GL_INVALID_VALUE; | 107 return GL_INVALID_VALUE; |
100 } | 108 } |
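The new HOLE case falls back to GL_INVALID_VALUE, the same sentinel the function already returns for unsupported formats, so downstream code never treats a hole-punch frame as a drawable texture. A standalone sketch of that mapping, using simplified stand-in types instead of the media::VideoFrame and GL headers:

    // Sketch only: simplified stand-ins for media::VideoFrame::Format and the
    // GL constants, to illustrate the sentinel-based mapping (a HOLE frame has
    // no backing texture, so it shares the GL_INVALID_VALUE return used for
    // unsupported formats).
    #include <cstdint>

    using GLenum = std::uint32_t;
    constexpr GLenum kGLLuminance = 0x1909;     // GL_LUMINANCE
    constexpr GLenum kGLInvalidValue = 0x0501;  // GL_INVALID_VALUE

    enum class FrameFormat { YV12, YV16, NativeTexture, Hole };

    GLenum FormatToGLenum(FrameFormat format, GLenum native_texture_target) {
      switch (format) {
        case FrameFormat::YV12:
        case FrameFormat::YV16:
          return kGLLuminance;           // software YUV: one luminance texture per plane
        case FrameFormat::NativeTexture:
          return native_texture_target;  // e.g. GL_TEXTURE_2D
        case FrameFormat::Hole:
          return kGLInvalidValue;        // hole-punch frame: nothing to sample
      }
      return kGLInvalidValue;
    }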
101 | 109 |
102 size_t VideoLayerImpl::numPlanes() const | 110 size_t VideoLayerImpl::numPlanes() const |
103 { | 111 { |
104 if (!m_frame) | 112 if (!m_frame) |
105 return 0; | 113 return 0; |
106 | 114 |
107 if (m_convertYUV) | 115 if (m_convertYUV) |
108 return 1; | 116 return 1; |
109 | 117 |
110 switch (m_frame->format()) { | 118 switch (m_frame->format()) { |
119 #if defined(GOOGLE_TV) | |
120 case media::VideoFrame::HOLE: | |
121 return 0; | |
122 #endif | |
111 case media::VideoFrame::RGB32: | 123 case media::VideoFrame::RGB32: |
112 return 1; | 124 return 1; |
113 case media::VideoFrame::YV12: | 125 case media::VideoFrame::YV12: |
114 case media::VideoFrame::YV16: | 126 case media::VideoFrame::YV16: |
115 return 3; | 127 return 3; |
116 case media::VideoFrame::INVALID: | 128 case media::VideoFrame::INVALID: |
117 case media::VideoFrame::EMPTY: | 129 case media::VideoFrame::EMPTY: |
118 case media::VideoFrame::I420: | 130 case media::VideoFrame::I420: |
119 break; | 131 break; |
120 case media::VideoFrame::NATIVE_TEXTURE: | 132 case media::VideoFrame::NATIVE_TEXTURE: |
(...skipping 24 matching lines...) | |
145 m_providerClientImpl->ReleaseLock(); | 157 m_providerClientImpl->ReleaseLock(); |
146 } | 158 } |
147 | 159 |
148 void VideoLayerImpl::willDrawInternal(ResourceProvider* resourceProvider) | 160 void VideoLayerImpl::willDrawInternal(ResourceProvider* resourceProvider) |
149 { | 161 { |
150 DCHECK(!m_externalTextureResource); | 162 DCHECK(!m_externalTextureResource); |
151 | 163 |
152 if (!m_frame) | 164 if (!m_frame) |
153 return; | 165 return; |
154 | 166 |
167 #if defined(GOOGLE_TV) | |
168 if (m_frame->format() == media::VideoFrame::HOLE) | |
169 return; | |
170 #endif | |
171 | |
155 m_format = convertVFCFormatToGLenum(*m_frame); | 172 m_format = convertVFCFormatToGLenum(*m_frame); |
156 | 173 |
157 // If these fail, we'll have to add draw logic that handles offset bitmap/ | 174 // If these fail, we'll have to add draw logic that handles offset bitmap/ |
158 // texture UVs. For now, just expect (0, 0) offset, since all our decoders | 175 // texture UVs. For now, just expect (0, 0) offset, since all our decoders |
159 // so far don't offset. | 176 // so far don't offset. |
160 DCHECK_EQ(m_frame->visible_rect().x(), 0); | 177 DCHECK_EQ(m_frame->visible_rect().x(), 0); |
161 DCHECK_EQ(m_frame->visible_rect().y(), 0); | 178 DCHECK_EQ(m_frame->visible_rect().y(), 0); |
162 | 179 |
163 if (m_format == GL_INVALID_VALUE) { | 180 if (m_format == GL_INVALID_VALUE) { |
164 m_providerClientImpl->PutCurrentFrame(m_frame); | 181 m_providerClientImpl->PutCurrentFrame(m_frame); |
(...skipping 43 matching lines...) | |
208 gfx::Rect opaqueRect(contentsOpaque() ? quadRect : gfx::Rect()); | 225 gfx::Rect opaqueRect(contentsOpaque() ? quadRect : gfx::Rect()); |
209 gfx::Rect visibleRect = m_frame->visible_rect(); | 226 gfx::Rect visibleRect = m_frame->visible_rect(); |
210 gfx::Size codedSize = m_frame->coded_size(); | 227 gfx::Size codedSize = m_frame->coded_size(); |
211 | 228 |
212 // pixels for macroblocked formats. | 229 // pixels for macroblocked formats. |
213 const float texWidthScale = | 230 const float texWidthScale = |
214 static_cast<float>(visibleRect.width()) / codedSize.width(); | 231 static_cast<float>(visibleRect.width()) / codedSize.width(); |
215 const float texHeightScale = | 232 const float texHeightScale = |
216 static_cast<float>(visibleRect.height()) / codedSize.height(); | 233 static_cast<float>(visibleRect.height()) / codedSize.height(); |
217 | 234 |
235 #if defined(GOOGLE_TV) | |
jamesr (2013/02/26 07:31:35): please put a comment in here below the #if defined
wonsik2 (2013/02/27 02:16:41): Done.
236 if (m_frame->format() == media::VideoFrame::HOLE) { | |
237 scoped_ptr<SolidColorDrawQuad> solidColorDrawQuad = SolidColorDrawQuad::Create(); | |
238 // Create a solid color quad with transparent black and force no | |
239 // blending. | |
240 solidColorDrawQuad->SetAll(sharedQuadState, quadRect, quadRect, quadRect, false, SK_ColorTRANSPARENT); | |
241 quadSink.append(solidColorDrawQuad.PassAs<DrawQuad>(), appendQuadsData); | |
242 return; | |
243 } | |
244 #endif | |
245 | |
218 switch (m_format) { | 246 switch (m_format) { |
219 case GL_LUMINANCE: { | 247 case GL_LUMINANCE: { |
220 // YUV software decoder. | 248 // YUV software decoder. |
221 const FramePlane& yPlane = m_framePlanes[media::VideoFrame::kYPlane]; | 249 const FramePlane& yPlane = m_framePlanes[media::VideoFrame::kYPlane]; |
222 const FramePlane& uPlane = m_framePlanes[media::VideoFrame::kUPlane]; | 250 const FramePlane& uPlane = m_framePlanes[media::VideoFrame::kUPlane]; |
223 const FramePlane& vPlane = m_framePlanes[media::VideoFrame::kVPlane]; | 251 const FramePlane& vPlane = m_framePlanes[media::VideoFrame::kVPlane]; |
224 gfx::SizeF texScale(texWidthScale, texHeightScale); | 252 gfx::SizeF texScale(texWidthScale, texHeightScale); |
225 scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::Create(); | 253 scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::Create(); |
226 yuvVideoQuad->SetNew(sharedQuadState, quadRect, opaqueRect, texScale, yPlane, uPlane, vPlane); | 254 yuvVideoQuad->SetNew(sharedQuadState, quadRect, opaqueRect, texScale, yPlane, uPlane, vPlane); |
227 quadSink.append(yuvVideoQuad.PassAs<DrawQuad>(), appendQuadsData); | 255 quadSink.append(yuvVideoQuad.PassAs<DrawQuad>(), appendQuadsData); |
(...skipping 189 matching lines...) | |
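The HOLE branch above punches a hole through the page instead of drawing video pixels: it appends a single transparent-black quad with blending forced off, so the compositor writes zero alpha into the framebuffer and, on GOOGLE_TV, the platform's video plane underneath can show through. A minimal standalone sketch of that branch, with hypothetical simplified types rather than the real cc quad API:

    // Sketch only: hypothetical simplified types, not cc::SolidColorDrawQuad or
    // cc::QuadSink. Shows the shape of the HOLE branch: append one transparent,
    // non-blended quad covering the layer and skip the normal texture/YUV quads.
    #include <cstdint>
    #include <memory>
    #include <utility>
    #include <vector>

    struct Rect { int x = 0, y = 0, width = 0, height = 0; };

    struct SolidColorQuad {
      Rect rect;
      std::uint32_t color = 0;     // ARGB; 0x00000000 is transparent black
      bool needs_blending = true;
    };

    bool AppendQuadsForHoleFrame(bool is_hole_frame, const Rect& layer_rect,
                                 std::vector<std::unique_ptr<SolidColorQuad>>* quad_list) {
      if (!is_hole_frame)
        return false;  // caller falls through to the normal texture/YUV paths

      std::unique_ptr<SolidColorQuad> quad(new SolidColorQuad);
      quad->rect = layer_rect;
      quad->color = 0x00000000;      // like SK_ColorTRANSPARENT in the patch
      quad->needs_blending = false;  // overwrite the destination with alpha == 0
      quad_list->push_back(std::move(quad));
      return true;  // hole punched; no further quads for this layer
    }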
417 { | 445 { |
418 m_providerClientImpl = providerClientImpl; | 446 m_providerClientImpl = providerClientImpl; |
419 } | 447 } |
420 | 448 |
421 const char* VideoLayerImpl::layerTypeAsString() const | 449 const char* VideoLayerImpl::layerTypeAsString() const |
422 { | 450 { |
423 return "VideoLayer"; | 451 return "VideoLayer"; |
424 } | 452 } |
425 | 453 |
426 } // namespace cc | 454 } // namespace cc |