Chromium Code Reviews

Unified diff: cc/layers/video_layer_impl.cc

Issue 13445009: cc: Move video upload to VideoResourceUpdater. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: consume textures that were produced (created 7 years, 8 months ago)
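
The patch set title refers to the texture mailbox mechanism: a producer publishes a texture under a mailbox name, and the compositor consumes it by that name instead of sharing a raw GL texture id across contexts. The following is a minimal standalone sketch of that idea only; MailboxRegistry, Produce, and Consume are illustrative names, not Chromium APIs (the real registry lives on the GPU service side and is keyed by an opaque 64-byte name rather than a string).

// Minimal standalone model of "consume textures that were produced".
// All names here are illustrative stand-ins, not Chromium classes.
#include <cstdio>
#include <map>
#include <string>

class MailboxRegistry {
 public:
  // Producer side: publish a texture under a mailbox name.
  void Produce(const std::string& mailbox_name, unsigned texture_id) {
    textures_[mailbox_name] = texture_id;
  }
  // Consumer side: look the texture up by name; 0 means "not produced yet".
  unsigned Consume(const std::string& mailbox_name) const {
    std::map<std::string, unsigned>::const_iterator it =
        textures_.find(mailbox_name);
    return it == textures_.end() ? 0u : it->second;
  }

 private:
  std::map<std::string, unsigned> textures_;
};

int main() {
  MailboxRegistry registry;
  registry.Produce("frame-0", 42);                    // video decoder side
  unsigned texture_id = registry.Consume("frame-0");  // compositor side
  std::printf("consumed texture %u\n", texture_id);
  return 0;
}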
--- a/cc/layers/video_layer_impl.cc
+++ b/cc/layers/video_layer_impl.cc
@@ -1,481 +1,310 @@
 // Copyright 2011 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "cc/layers/video_layer_impl.h"
 
+#include "base/bind.h"
 #include "base/logging.h"
-#include "cc/base/math_util.h"
 #include "cc/layers/quad_sink.h"
 #include "cc/layers/video_frame_provider_client_impl.h"
-#include "cc/output/renderer.h"
 #include "cc/quads/io_surface_draw_quad.h"
 #include "cc/quads/stream_video_draw_quad.h"
 #include "cc/quads/texture_draw_quad.h"
 #include "cc/quads/yuv_video_draw_quad.h"
 #include "cc/resources/resource_provider.h"
 #include "cc/trees/layer_tree_impl.h"
-#include "gpu/GLES2/gl2extchromium.h"
-#include "media/filters/skcanvas_video_renderer.h"
-#include "third_party/khronos/GLES2/gl2.h"
-#include "third_party/khronos/GLES2/gl2ext.h"
+#include "cc/trees/proxy.h"
+#include "media/base/video_frame.h"
 
 #if defined(GOOGLE_TV)
 #include "cc/quads/solid_color_draw_quad.h"
 #endif
 
 namespace cc {
 
 // static
 scoped_ptr<VideoLayerImpl> VideoLayerImpl::Create(
     LayerTreeImpl* tree_impl,
     int id,
     VideoFrameProvider* provider) {
   scoped_ptr<VideoLayerImpl> layer(new VideoLayerImpl(tree_impl, id));
   layer->SetProviderClientImpl(VideoFrameProviderClientImpl::Create(provider));
   DCHECK(tree_impl->proxy()->IsImplThread());
   DCHECK(tree_impl->proxy()->IsMainThreadBlocked());
   return layer.Pass();
 }
 
 VideoLayerImpl::VideoLayerImpl(LayerTreeImpl* tree_impl, int id)
     : LayerImpl(tree_impl, id),
-      frame_(NULL),
-      format_(media::VideoFrame::INVALID),
-      convert_yuv_(false),
-      external_texture_resource_(0) {}
+      frame_(NULL) {}
 
 VideoLayerImpl::~VideoLayerImpl() {
   if (!provider_client_impl_->Stopped()) {
     // In impl side painting, we may have a pending and active layer
     // associated with the video provider at the same time. Both have a ref
     // on the VideoFrameProviderClientImpl, but we stop when the first
     // LayerImpl (the one on the pending tree) is destroyed since we know
     // the main thread is blocked for this commit.
     DCHECK(layer_tree_impl()->proxy()->IsImplThread());
     DCHECK(layer_tree_impl()->proxy()->IsMainThreadBlocked());
     provider_client_impl_->Stop();
   }
-  FreeFramePlanes(layer_tree_impl()->resource_provider());
-
-#ifndef NDEBUG
-  for (size_t i = 0; i < media::VideoFrame::kMaxPlanes; ++i)
-    DCHECK(!frame_planes_[i].resource_id);
-  DCHECK(!external_texture_resource_);
-#endif
 }
 
 scoped_ptr<LayerImpl> VideoLayerImpl::CreateLayerImpl(
     LayerTreeImpl* tree_impl) {
   return scoped_ptr<LayerImpl>(new VideoLayerImpl(tree_impl, id()));
 }
 
 void VideoLayerImpl::PushPropertiesTo(LayerImpl* layer) {
   LayerImpl::PushPropertiesTo(layer);
 
   VideoLayerImpl* other = static_cast<VideoLayerImpl*>(layer);
   other->SetProviderClientImpl(provider_client_impl_);
 }
 
 void VideoLayerImpl::DidBecomeActive() {
   provider_client_impl_->set_active_video_layer(this);
 }
 
+static void EmptyCallback(unsigned sync_point) {}
+
 void VideoLayerImpl::WillDraw(ResourceProvider* resource_provider) {
   LayerImpl::WillDraw(resource_provider);
 
-
   // Explicitly acquire and release the provider mutex so it can be held from
   // WillDraw to DidDraw. Since the compositor thread is in the middle of
   // drawing, the layer will not be destroyed before DidDraw is called.
   // Therefore, the only thing that will prevent this lock from being released
   // is the GPU process locking it. As the GPU process can't cause the
   // destruction of the provider (calling StopUsingProvider), holding this
   // lock should not cause a deadlock.
   frame_ = provider_client_impl_->AcquireLockAndCurrentFrame();
 
-  WillDrawInternal(resource_provider);
-  FreeUnusedFramePlanes(resource_provider);
-
-  if (!frame_)
+  if (!frame_) {
     provider_client_impl_->ReleaseLock();
-}
-
-void VideoLayerImpl::WillDrawInternal(ResourceProvider* resource_provider) {
-  DCHECK(!external_texture_resource_);
-
-  if (!frame_)
-    return;
-
-  format_ = frame_->format();
-
-#if defined(GOOGLE_TV)
-  if (format_ == media::VideoFrame::HOLE)
-    return;
-#endif
-
-  // If these fail, we'll have to add draw logic that handles offset bitmap/
-  // texture UVs. For now, just expect (0, 0) offset, since all our decoders
-  // so far don't offset.
-  DCHECK_EQ(frame_->visible_rect().x(), 0);
-  DCHECK_EQ(frame_->visible_rect().y(), 0);
-
-  if (format_ == media::VideoFrame::INVALID) {
-    provider_client_impl_->PutCurrentFrame(frame_);
-    frame_ = NULL;
     return;
   }
 
-  // TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB
-  // conversion here. That involves an extra copy of each frame to a bitmap.
-  // Obviously, this is suboptimal and should be addressed once ubercompositor
-  // starts shaping up.
-  convert_yuv_ =
-      resource_provider->default_resource_type() == ResourceProvider::Bitmap &&
-      (format_ == media::VideoFrame::YV12 ||
-       format_ == media::VideoFrame::YV16);
+  if (!updater_)
+    updater_.reset(new VideoResourceUpdater(resource_provider));
 
-  if (convert_yuv_)
-    format_ = media::VideoFrame::RGB32;
+  VideoFrameExternalResources external_resources;
+  if (frame_->format() == media::VideoFrame::NATIVE_TEXTURE) {
+    // TODO(danakj): To make this work for ubercomp, push this code out to
+    // WebMediaPlayer and have it set a callback so it knows it can reuse the
+    // texture.
+    TextureMailbox::ReleaseCallback empty_callback = base::Bind(&EmptyCallback);
+    external_resources = updater_->CreateForHardwarePlanes(
+        frame_, empty_callback);
+  } else {
+    external_resources = updater_->CreateForSoftwarePlanes(frame_);
+  }
 
-  if (!SetupFramePlanes(resource_provider)) {
-    provider_client_impl_->PutCurrentFrame(frame_);
-    frame_ = NULL;
+  frame_resource_type_ = external_resources.type;
+
+  if (external_resources.type ==
+      VideoFrameExternalResources::SOFTWARE_RESOURCE) {
+    software_resources_ = external_resources.software_resources;
+    software_release_callback_ =
+        external_resources.software_release_callback;
     return;
   }
 
-  if (format_ == media::VideoFrame::NATIVE_TEXTURE &&
-      frame_->texture_target() == GL_TEXTURE_2D) {
-    external_texture_resource_ =
-        resource_provider->CreateResourceFromExternalTexture(
-            frame_->texture_id());
+  for (size_t i = 0; i < external_resources.mailboxes.size(); ++i) {
+    frame_resources_.push_back(
+        resource_provider->CreateResourceFromTextureMailbox(
+            external_resources.mailboxes[i]));
   }
 }
 
 void VideoLayerImpl::AppendQuads(QuadSink* quad_sink,
                                  AppendQuadsData* append_quads_data) {
   if (!frame_)
     return;
 
   SharedQuadState* shared_quad_state =
       quad_sink->UseSharedQuadState(CreateSharedQuadState());
   AppendDebugBorderQuad(quad_sink, shared_quad_state, append_quads_data);
 
-  // TODO(danakj): When we pass quads out of process, we need to double-buffer,
-  // or otherwise synchonize use of all textures in the quad.
-
   gfx::Rect quad_rect(content_bounds());
   gfx::Rect opaque_rect(contents_opaque() ? quad_rect : gfx::Rect());
   gfx::Rect visible_rect = frame_->visible_rect();
   gfx::Size coded_size = frame_->coded_size();
 
-  // pixels for macroblocked formats.
+  // Pixels for macroblocked formats.
   float tex_width_scale =
      static_cast<float>(visible_rect.width()) / coded_size.width();
   float tex_height_scale =
      static_cast<float>(visible_rect.height()) / coded_size.height();
 
-#if defined(GOOGLE_TV)
-  // This block and other blocks wrapped around #if defined(GOOGLE_TV) is not
-  // maintained by the general compositor team. Please contact the following
-  // people instead:
-  //
-  // wonsik@chromium.org
-  // ycheo@chromium.org
-
-  if (frame_->format() == media::VideoFrame::HOLE) {
-    scoped_ptr<SolidColorDrawQuad> solid_color_draw_quad =
-        SolidColorDrawQuad::Create();
-    // Create a solid color quad with transparent black and force no
-    // blending.
-    solid_color_draw_quad->SetAll(
-        shared_quad_state, quad_rect, quad_rect, quad_rect, false,
-        SK_ColorTRANSPARENT);
-    quad_sink->Append(solid_color_draw_quad.PassAs<DrawQuad>(),
-                      append_quads_data);
-    return;
-  }
-#endif
-
-  switch (format_) {
-    case media::VideoFrame::YV12:
-    case media::VideoFrame::YV16: {
-      // YUV software decoder.
-      const FramePlane& y_plane = frame_planes_[media::VideoFrame::kYPlane];
-      const FramePlane& u_plane = frame_planes_[media::VideoFrame::kUPlane];
-      const FramePlane& v_plane = frame_planes_[media::VideoFrame::kVPlane];
+  switch (frame_resource_type_) {
+    // TODO(danakj): Remove this, hide it in the hardware path.
+    case VideoFrameExternalResources::SOFTWARE_RESOURCE: {
+      DCHECK_EQ(frame_resources_.size(), 0u);
+      DCHECK_EQ(software_resources_.size(), 1u);
+      if (software_resources_.size() < 1u)
+        break;
+      bool premultiplied_alpha = true;
+      gfx::PointF uv_top_left(0.f, 0.f);
+      gfx::PointF uv_bottom_right(tex_width_scale, tex_height_scale);
+      float opacity[] = {1.0f, 1.0f, 1.0f, 1.0f};
+      bool flipped = false;
+      scoped_ptr<TextureDrawQuad> texture_quad = TextureDrawQuad::Create();
+      texture_quad->SetNew(shared_quad_state,
+                           quad_rect,
+                           opaque_rect,
+                           software_resources_[0],
+                           premultiplied_alpha,
+                           uv_top_left,
+                           uv_bottom_right,
+                           opacity,
+                           flipped);
+      quad_sink->Append(texture_quad.PassAs<DrawQuad>(), append_quads_data);
+      break;
+    }
+    case VideoFrameExternalResources::YUV_RESOURCE: {
+      DCHECK_EQ(frame_resources_.size(), 3u);
+      if (frame_resources_.size() < 3u)
+        break;
      gfx::SizeF tex_scale(tex_width_scale, tex_height_scale);
      scoped_ptr<YUVVideoDrawQuad> yuv_video_quad = YUVVideoDrawQuad::Create();
      yuv_video_quad->SetNew(shared_quad_state,
                             quad_rect,
                             opaque_rect,
                             tex_scale,
-                             y_plane,
-                             u_plane,
-                             v_plane);
+                             frame_resources_[0],
+                             frame_resources_[1],
+                             frame_resources_[2]);
      quad_sink->Append(yuv_video_quad.PassAs<DrawQuad>(), append_quads_data);
      break;
    }
-    case media::VideoFrame::RGB32: {
-      // RGBA software decoder: a converted YUV frame (see: convert_yuv_).
-      const FramePlane& plane = frame_planes_[media::VideoFrame::kRGBPlane];
+    case VideoFrameExternalResources::RGB_RESOURCE: {
+      DCHECK_EQ(frame_resources_.size(), 1u);
+      if (frame_resources_.size() < 1u)
+        break;
      bool premultiplied_alpha = true;
      gfx::PointF uv_top_left(0.f, 0.f);
      gfx::PointF uv_bottom_right(tex_width_scale, tex_height_scale);
      float opacity[] = {1.0f, 1.0f, 1.0f, 1.0f};
      bool flipped = false;
      scoped_ptr<TextureDrawQuad> texture_quad = TextureDrawQuad::Create();
      texture_quad->SetNew(shared_quad_state,
                           quad_rect,
                           opaque_rect,
-                           plane.resource_id,
+                           frame_resources_[0],
                           premultiplied_alpha,
                           uv_top_left,
                           uv_bottom_right,
                           opacity,
                           flipped);
      quad_sink->Append(texture_quad.PassAs<DrawQuad>(), append_quads_data);
      break;
    }
-    case media::VideoFrame::NATIVE_TEXTURE:
-      switch (frame_->texture_target()) {
-        case GL_TEXTURE_2D: {
-          // NativeTexture hardware decoder.
-          bool premultiplied_alpha = true;
-          gfx::PointF uv_top_left(0.f, 0.f);
-          gfx::PointF uv_bottom_right(tex_width_scale, tex_height_scale);
-          float opacity[] = {1.0f, 1.0f, 1.0f, 1.0f};
-          bool flipped = false;
-          scoped_ptr<TextureDrawQuad> texture_quad = TextureDrawQuad::Create();
-          texture_quad->SetNew(shared_quad_state,
-                               quad_rect,
-                               opaque_rect,
-                               external_texture_resource_,
-                               premultiplied_alpha,
-                               uv_top_left,
-                               uv_bottom_right,
-                               opacity,
-                               flipped);
-          quad_sink->Append(texture_quad.PassAs<DrawQuad>(), append_quads_data);
-          break;
-        }
-        case GL_TEXTURE_RECTANGLE_ARB: {
-          gfx::Size visible_size(visible_rect.width(), visible_rect.height());
-          scoped_ptr<IOSurfaceDrawQuad> io_surface_quad =
-              IOSurfaceDrawQuad::Create();
-          io_surface_quad->SetNew(shared_quad_state,
-                                  quad_rect,
-                                  opaque_rect,
-                                  visible_size,
-                                  frame_->texture_id(),
-                                  IOSurfaceDrawQuad::UNFLIPPED);
-          quad_sink->Append(io_surface_quad.PassAs<DrawQuad>(),
-                            append_quads_data);
-          break;
-        }
-        case GL_TEXTURE_EXTERNAL_OES: {
-          // StreamTexture hardware decoder.
-          gfx::Transform transform(
-              provider_client_impl_->stream_texture_matrix());
-          transform.Scale(tex_width_scale, tex_height_scale);
-          scoped_ptr<StreamVideoDrawQuad> stream_video_quad =
-              StreamVideoDrawQuad::Create();
-          stream_video_quad->SetNew(shared_quad_state,
-                                    quad_rect,
-                                    opaque_rect,
-                                    frame_->texture_id(),
-                                    transform);
-          quad_sink->Append(stream_video_quad.PassAs<DrawQuad>(),
-                            append_quads_data);
-          break;
-        }
-        default:
-          NOTREACHED();
-          break;
-      }
+    case VideoFrameExternalResources::STREAM_TEXTURE_RESOURCE: {
+      DCHECK_EQ(frame_resources_.size(), 1u);
+      if (frame_resources_.size() < 1u)
+        break;
+      gfx::Transform transform(
+          provider_client_impl_->stream_texture_matrix());
+      transform.Scale(tex_width_scale, tex_height_scale);
+      scoped_ptr<StreamVideoDrawQuad> stream_video_quad =
+          StreamVideoDrawQuad::Create();
+      stream_video_quad->SetNew(shared_quad_state,
+                                quad_rect,
+                                opaque_rect,
+                                frame_resources_[0],
+                                transform);
+      quad_sink->Append(stream_video_quad.PassAs<DrawQuad>(),
+                        append_quads_data);
      break;
-    case media::VideoFrame::INVALID:
-    case media::VideoFrame::EMPTY:
-    case media::VideoFrame::I420:
+    }
+    case VideoFrameExternalResources::IO_SURFACE: {
+      DCHECK_EQ(frame_resources_.size(), 1u);
+      if (frame_resources_.size() < 1u)
+        break;
+      gfx::Size visible_size(visible_rect.width(), visible_rect.height());
+      scoped_ptr<IOSurfaceDrawQuad> io_surface_quad =
+          IOSurfaceDrawQuad::Create();
+      io_surface_quad->SetNew(shared_quad_state,
+                              quad_rect,
+                              opaque_rect,
+                              visible_size,
+                              frame_resources_[0],
+                              IOSurfaceDrawQuad::UNFLIPPED);
+      quad_sink->Append(io_surface_quad.PassAs<DrawQuad>(),
+                        append_quads_data);
+      break;
+    }
 #if defined(GOOGLE_TV)
-    case media::VideoFrame::HOLE:
+    // This block and other blocks wrapped around #if defined(GOOGLE_TV) is not
+    // maintained by the general compositor team. Please contact the following
+    // people instead:
+    //
+    // wonsik@chromium.org
+    // ycheo@chromium.org
+    case VideoFrameExternalResources::HOLE: {
+      DCHECK_EQ(frame_resources_.size(), 0u);
+      scoped_ptr<SolidColorDrawQuad> solid_color_draw_quad =
+          SolidColorDrawQuad::Create();
+      // Create a solid color quad with transparent black and force no
+      // blending.
+      solid_color_draw_quad->SetAll(
+          shared_quad_state, quad_rect, quad_rect, quad_rect, false,
+          SK_ColorTRANSPARENT);
+      quad_sink->Append(solid_color_draw_quad.PassAs<DrawQuad>(),
+                        append_quads_data);
+      break;
+    }
 #endif
-      NOTREACHED();
+    case VideoFrameExternalResources::NONE:
+      NOTIMPLEMENTED();
      break;
  }
 }
 
 void VideoLayerImpl::DidDraw(ResourceProvider* resource_provider) {
   LayerImpl::DidDraw(resource_provider);
 
   if (!frame_)
     return;
 
-  if (format_ == media::VideoFrame::NATIVE_TEXTURE &&
-      frame_->texture_target() == GL_TEXTURE_2D) {
-    DCHECK(external_texture_resource_);
-    // TODO(danakj): the following assert will not be true when sending
-    // resources to a parent compositor. We will probably need to hold on to
-    // frame_ for longer, and have several "current frames" in the pipeline.
-    DCHECK(!resource_provider->InUseByConsumer(external_texture_resource_));
-    resource_provider->DeleteResource(external_texture_resource_);
-    external_texture_resource_ = 0;
+  if (frame_resource_type_ ==
+      VideoFrameExternalResources::SOFTWARE_RESOURCE) {
+    for (size_t i = 0; i < software_resources_.size(); ++i)
+      software_release_callback_.Run(0);
+
+    software_resources_.clear();
+    software_release_callback_.Reset();
+  } else {
+    for (size_t i = 0; i < frame_resources_.size(); ++i)
+      resource_provider->DeleteResource(frame_resources_[i]);
+    frame_resources_.clear();
   }
 
   provider_client_impl_->PutCurrentFrame(frame_);
   frame_ = NULL;
 
   provider_client_impl_->ReleaseLock();
 }
 
-static gfx::Size VideoFrameDimension(media::VideoFrame* frame, int plane) {
-  gfx::Size dimensions = frame->coded_size();
-  switch (frame->format()) {
-    case media::VideoFrame::YV12:
-      if (plane != media::VideoFrame::kYPlane) {
-        dimensions.set_width(dimensions.width() / 2);
-        dimensions.set_height(dimensions.height() / 2);
-      }
-      break;
-    case media::VideoFrame::YV16:
-      if (plane != media::VideoFrame::kYPlane)
-        dimensions.set_width(dimensions.width() / 2);
-      break;
-    default:
-      break;
-  }
-  return dimensions;
-}
-
-bool VideoLayerImpl::FramePlane::AllocateData(
-    ResourceProvider* resource_provider) {
-  if (resource_id)
-    return true;
-
-  resource_id = resource_provider->CreateResource(
-      size, format, ResourceProvider::TextureUsageAny);
-  return resource_id != 0;
-}
-
-void VideoLayerImpl::FramePlane::FreeData(ResourceProvider* resource_provider) {
-  if (!resource_id)
-    return;
-
-  resource_provider->DeleteResource(resource_id);
-  resource_id = 0;
-}
-
-// Convert media::VideoFrame::Format to OpenGL enum values.
-static GLenum ConvertVFCFormatToGLenum(const media::VideoFrame::Format format) {
-  switch (format) {
-    case media::VideoFrame::YV12:
-    case media::VideoFrame::YV16:
-      return GL_LUMINANCE;
-    case media::VideoFrame::RGB32:
-      return GL_RGBA;
-    case media::VideoFrame::NATIVE_TEXTURE:
-#if defined(GOOGLE_TV)
-    case media::VideoFrame::HOLE:
-#endif
-    case media::VideoFrame::INVALID:
-    case media::VideoFrame::EMPTY:
-    case media::VideoFrame::I420:
-      NOTREACHED();
-      break;
-  }
-  return GL_INVALID_VALUE;
-}
-
-bool VideoLayerImpl::SetupFramePlanes(ResourceProvider* resource_provider) {
-  const size_t plane_count = media::VideoFrame::NumPlanes(format_);
-  if (!plane_count)
-    return true;
-
-  const int max_texture_size = resource_provider->max_texture_size();
-  const GLenum pixel_format = ConvertVFCFormatToGLenum(format_);
-  for (size_t plane_index = 0; plane_index < plane_count; ++plane_index) {
-    VideoLayerImpl::FramePlane* plane = &frame_planes_[plane_index];
-
-    gfx::Size required_texture_size = VideoFrameDimension(frame_, plane_index);
-    // TODO(danakj): Remove the test against max_texture_size when tiled layers
-    // are implemented.
-    if (required_texture_size.IsEmpty() ||
-        required_texture_size.width() > max_texture_size ||
-        required_texture_size.height() > max_texture_size)
-      return false;
-
-    if (plane->size != required_texture_size || plane->format != pixel_format) {
-      plane->FreeData(resource_provider);
-      plane->size = required_texture_size;
-      plane->format = pixel_format;
-    }
-
-    if (!plane->AllocateData(resource_provider))
-      return false;
-  }
-
-  if (convert_yuv_) {
-    if (!video_renderer_)
-      video_renderer_.reset(new media::SkCanvasVideoRenderer);
-    const VideoLayerImpl::FramePlane& plane =
-        frame_planes_[media::VideoFrame::kRGBPlane];
-    ResourceProvider::ScopedWriteLockSoftware lock(resource_provider,
-                                                   plane.resource_id);
-    video_renderer_->Paint(frame_,
-                           lock.sk_canvas(),
-                           frame_->visible_rect(),
-                           0xff);
-    return true;
-  }
-
-  for (size_t plane_index = 0; plane_index < plane_count; ++plane_index) {
-    const VideoLayerImpl::FramePlane& plane = frame_planes_[plane_index];
-    // Only planar formats planes should need upload.
-    DCHECK_EQ(plane.format, static_cast<unsigned>(GL_LUMINANCE));
-    const uint8_t* software_plane_pixels = frame_->data(plane_index);
-    gfx::Rect image_rect(0,
-                         0,
-                         frame_->stride(plane_index),
-                         plane.size.height());
-    gfx::Rect source_rect(plane.size);
-    resource_provider->SetPixels(plane.resource_id,
-                                 software_plane_pixels,
-                                 image_rect,
-                                 source_rect,
-                                 gfx::Vector2d());
-  }
-  return true;
-}
-
-void VideoLayerImpl::FreeFramePlanes(ResourceProvider* resource_provider) {
-  for (size_t i = 0; i < media::VideoFrame::kMaxPlanes; ++i)
-    frame_planes_[i].FreeData(resource_provider);
-}
-
-void VideoLayerImpl::FreeUnusedFramePlanes(
-    ResourceProvider* resource_provider) {
-  size_t first_unused_plane = (frame_ ? media::VideoFrame::NumPlanes(format_)
-                                      : 0);
-  for (size_t i = first_unused_plane; i < media::VideoFrame::kMaxPlanes; ++i)
-    frame_planes_[i].FreeData(resource_provider);
-}
-
 void VideoLayerImpl::DidLoseOutputSurface() {
-  FreeFramePlanes(layer_tree_impl()->resource_provider());
+  updater_.reset();
 }
 
 void VideoLayerImpl::SetNeedsRedraw() {
   set_update_rect(gfx::UnionRects(update_rect(), gfx::RectF(bounds())));
   layer_tree_impl()->SetNeedsRedraw();
 }
 
 void VideoLayerImpl::SetProviderClientImpl(
     scoped_refptr<VideoFrameProviderClientImpl> provider_client_impl) {
   provider_client_impl_ = provider_client_impl;
 }
 
 const char* VideoLayerImpl::LayerTypeAsString() const {
   return "VideoLayer";
 }
 
 }  // namespace cc
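
With the patch applied, the per-frame flow is: WillDraw() hands the current frame to VideoResourceUpdater and turns the returned mailboxes (or the single software resource) into compositor resources, AppendQuads() emits quads that only reference those resource ids, and DidDraw() releases them again. The sketch below is a minimal standalone model of that create/reference/release ordering; FakeResourceProvider and FakeVideoLayer are illustrative stand-ins, not cc types, and the software path (released through software_release_callback_ in the real code) is collapsed into the same DeleteResource() call for brevity.

// Minimal standalone model of the WillDraw / AppendQuads / DidDraw resource
// lifecycle added by this patch. Every type below is an illustrative
// stand-in, not a cc class; only the ordering mirrors the real code.
#include <cstdio>
#include <vector>

// Stand-in for ResourceProvider: hands out integer resource ids.
class FakeResourceProvider {
 public:
  FakeResourceProvider() : next_id_(1) {}
  unsigned CreateResourceFromMailbox() { return next_id_++; }
  void DeleteResource(unsigned id) { std::printf("deleted resource %u\n", id); }

 private:
  unsigned next_id_;
};

// Stand-in for VideoLayerImpl's per-frame resource bookkeeping.
class FakeVideoLayer {
 public:
  explicit FakeVideoLayer(FakeResourceProvider* provider)
      : provider_(provider) {}

  // WillDraw: each mailbox produced for the frame becomes a resource id.
  void WillDraw(size_t mailbox_count) {
    for (size_t i = 0; i < mailbox_count; ++i)
      frame_resources_.push_back(provider_->CreateResourceFromMailbox());
  }

  // AppendQuads: quads only reference the ids created in WillDraw.
  void AppendQuads() {
    for (size_t i = 0; i < frame_resources_.size(); ++i)
      std::printf("quad references resource %u\n", frame_resources_[i]);
  }

  // DidDraw: the per-frame resources are released once the frame is drawn.
  void DidDraw() {
    for (size_t i = 0; i < frame_resources_.size(); ++i)
      provider_->DeleteResource(frame_resources_[i]);
    frame_resources_.clear();
  }

 private:
  FakeResourceProvider* provider_;
  std::vector<unsigned> frame_resources_;
};

int main() {
  FakeResourceProvider provider;
  FakeVideoLayer layer(&provider);
  layer.WillDraw(3);  // e.g. three YUV planes -> three resources
  layer.AppendQuads();
  layer.DidDraw();
  return 0;
}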