OLD | NEW |
---|---|
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. |
watk
2015/08/28 21:52:05
s/2013/2015
liberato (no reviews please)
2015/09/04 17:59:47
Done.
| |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/gpu/media/android_video_decode_accelerator.h" | 5 #include "content/common/gpu/media/android_video_decode_accelerator_base.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/logging.h" | 8 #include "base/logging.h" |
9 #include "base/message_loop/message_loop.h" | 9 #include "base/message_loop/message_loop.h" |
10 #include "base/metrics/histogram.h" | 10 #include "base/metrics/histogram.h" |
11 #include "content/common/gpu/gpu_channel.h" | 11 #include "content/common/gpu/gpu_channel.h" |
12 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" | 12 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" |
13 #include "media/base/bitstream_buffer.h" | 13 #include "media/base/bitstream_buffer.h" |
14 #include "media/base/limits.h" | 14 #include "media/base/limits.h" |
15 #include "media/base/video_decoder_config.h" | 15 #include "media/base/video_decoder_config.h" |
16 #include "media/video/picture.h" | 16 #include "media/video/picture.h" |
17 #include "ui/gl/android/scoped_java_surface.h" | 17 #include "ui/gl/android/scoped_java_surface.h" |
18 #include "ui/gl/android/surface_texture.h" | 18 #include "ui/gl/android/surface_texture.h" |
19 #include "ui/gl/gl_bindings.h" | 19 #include "ui/gl/gl_bindings.h" |
20 | 20 |
21 namespace content { | 21 namespace content { |
22 | 22 |
23 // Helper macros for dealing with failure. If |result| evaluates false, emit | 23 // Helper macros for dealing with failure. If |result| evaluates false, emit |
24 // |log| to ERROR, register |error| with the decoder, and return. | 24 // |log| to ERROR, register |error| with the decoder, and return. |
25 #define RETURN_ON_FAILURE(result, log, error) \ | 25 #define RETURN_ON_FAILURE(result, log, error) \ |
26 do { \ | 26 do { \ |
27 if (!(result)) { \ | 27 if (!(result)) { \ |
28 DLOG(ERROR) << log; \ | 28 DLOG(ERROR) << log; \ |
29 base::MessageLoop::current()->PostTask( \ | 29 base::MessageLoop::current()->PostTask( \ |
30 FROM_HERE, \ | 30 FROM_HERE, \ |
31 base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, \ | 31 base::Bind(&AndroidVideoDecodeAcceleratorBase::NotifyError, \ |
32 weak_this_factory_.GetWeakPtr(), \ | 32 weak_this_factory_.GetWeakPtr(), \ |
33 error)); \ | 33 error)); \ |
34 state_ = ERROR; \ | 34 state_ = ERROR; \ |
35 return; \ | 35 return; \ |
36 } \ | 36 } \ |
37 } while (0) | 37 } while (0) |
38 | 38 |
39 // TODO(dwkang): We only need kMaxVideoFrames to pass media stack's prerolling | |
40 // phase, but 1 is added due to crbug.com/176036. This should be tuned when we | |
41 // have actual use case. | |
42 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 }; | |
43 | |
44 // Max number of bitstreams notified to the client with | 39 // Max number of bitstreams notified to the client with |
45 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. | 40 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. |
46 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; | 41 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; |
47 | 42 |
48 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) | 43 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) |
49 // MediaCodec is only guaranteed to support baseline, but some devices may | 44 // MediaCodec is only guaranteed to support baseline, but some devices may |
50 // support others. Advertise support for all H264 profiles and let the | 45 // support others. Advertise support for all H264 profiles and let the |
51 // MediaCodec fail when decoding if it's not actually supported. It's assumed | 46 // MediaCodec fail when decoding if it's not actually supported. It's assumed |
52 // that consumers won't have software fallback for H264 on Android anyway. | 47 // that consumers won't have software fallback for H264 on Android anyway. |
53 static const media::VideoCodecProfile kSupportedH264Profiles[] = { | 48 static const media::VideoCodecProfile kSupportedH264Profiles[] = { |
(...skipping 26 matching lines...) Expand all Loading... | |
80 // pictures have been fed to saturate any internal buffering). This is | 75 // pictures have been fed to saturate any internal buffering). This is |
81 // speculative and it's unclear that this would be a win (nor that there's a | 76 // speculative and it's unclear that this would be a win (nor that there's a |
82 // reasonably device-agnostic way to fill in the "believes" above). | 77 // reasonably device-agnostic way to fill in the "believes" above). |
83 return base::TimeDelta::FromMilliseconds(10); | 78 return base::TimeDelta::FromMilliseconds(10); |
84 } | 79 } |
85 | 80 |
86 static inline const base::TimeDelta NoWaitTimeOut() { | 81 static inline const base::TimeDelta NoWaitTimeOut() { |
87 return base::TimeDelta::FromMicroseconds(0); | 82 return base::TimeDelta::FromMicroseconds(0); |
88 } | 83 } |
89 | 84 |
90 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( | 85 AndroidVideoDecodeAcceleratorBase::AndroidVideoDecodeAcceleratorBase( |
91 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder, | 86 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder, |
92 const base::Callback<bool(void)>& make_context_current) | 87 const base::Callback<bool(void)>& make_context_current) |
93 : client_(NULL), | 88 : client_(NULL), |
94 make_context_current_(make_context_current), | 89 make_context_current_(make_context_current), |
95 codec_(media::kCodecH264), | 90 codec_(media::kCodecH264), |
96 state_(NO_ERROR), | 91 state_(NO_ERROR), |
97 surface_texture_id_(0), | 92 surface_texture_id_(0), |
98 picturebuffers_requested_(false), | 93 picturebuffers_requested_(false), |
99 gl_decoder_(decoder), | 94 gl_decoder_(decoder), |
100 weak_this_factory_(this) {} | 95 weak_this_factory_(this) {} |
101 | 96 |
102 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { | 97 AndroidVideoDecodeAcceleratorBase::~AndroidVideoDecodeAcceleratorBase() { |
103 DCHECK(thread_checker_.CalledOnValidThread()); | 98 DCHECK(thread_checker_.CalledOnValidThread()); |
104 } | 99 } |
105 | 100 |
106 bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, | 101 bool AndroidVideoDecodeAcceleratorBase::Initialize( |
107 Client* client) { | 102 media::VideoCodecProfile profile, |
103 Client* client) { | |
108 DCHECK(!media_codec_); | 104 DCHECK(!media_codec_); |
109 DCHECK(thread_checker_.CalledOnValidThread()); | 105 DCHECK(thread_checker_.CalledOnValidThread()); |
110 | 106 |
111 client_ = client; | 107 client_ = client; |
112 codec_ = VideoCodecProfileToVideoCodec(profile); | 108 codec_ = VideoCodecProfileToVideoCodec(profile); |
113 | 109 |
114 bool profile_supported = codec_ == media::kCodecVP8; | 110 bool profile_supported = codec_ == media::kCodecVP8; |
115 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) | 111 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) |
116 profile_supported |= | 112 profile_supported |= |
117 (codec_ == media::kCodecVP9 || codec_ == media::kCodecH264); | 113 (codec_ == media::kCodecVP9 || codec_ == media::kCodecH264); |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
157 surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_); | 153 surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_); |
158 | 154 |
159 if (!ConfigureMediaCodec()) { | 155 if (!ConfigureMediaCodec()) { |
160 LOG(ERROR) << "Failed to create MediaCodec instance."; | 156 LOG(ERROR) << "Failed to create MediaCodec instance."; |
161 return false; | 157 return false; |
162 } | 158 } |
163 | 159 |
164 return true; | 160 return true; |
165 } | 161 } |
166 | 162 |
167 void AndroidVideoDecodeAccelerator::DoIOTask() { | 163 void AndroidVideoDecodeAcceleratorBase::DoIOTask() { |
168 DCHECK(thread_checker_.CalledOnValidThread()); | 164 DCHECK(thread_checker_.CalledOnValidThread()); |
169 if (state_ == ERROR) { | 165 if (state_ == ERROR) { |
170 return; | 166 return; |
171 } | 167 } |
172 | 168 |
173 QueueInput(); | 169 QueueInput(); |
174 DequeueOutput(); | 170 DequeueOutput(); |
175 } | 171 } |
176 | 172 |
177 void AndroidVideoDecodeAccelerator::QueueInput() { | 173 void AndroidVideoDecodeAcceleratorBase::QueueInput() { |
178 DCHECK(thread_checker_.CalledOnValidThread()); | 174 DCHECK(thread_checker_.CalledOnValidThread()); |
179 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance) | 175 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance) |
180 return; | 176 return; |
181 if (pending_bitstream_buffers_.empty()) | 177 if (pending_bitstream_buffers_.empty()) |
182 return; | 178 return; |
183 | 179 |
184 int input_buf_index = 0; | 180 int input_buf_index = 0; |
185 media::MediaCodecStatus status = media_codec_->DequeueInputBuffer( | 181 media::MediaCodecStatus status = media_codec_->DequeueInputBuffer( |
186 NoWaitTimeOut(), &input_buf_index); | 182 NoWaitTimeOut(), &input_buf_index); |
187 if (status != media::MEDIA_CODEC_OK) { | 183 if (status != media::MEDIA_CODEC_OK) { |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
226 | 222 |
227 // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output | 223 // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output |
228 // will be returned from the bitstream buffer. However, MediaCodec API is | 224 // will be returned from the bitstream buffer. However, MediaCodec API is |
229 // not enough to guarantee it. | 225 // not enough to guarantee it. |
230 // So, here, we calls NotifyEndOfBitstreamBuffer() in advance in order to | 226 // So, here, we calls NotifyEndOfBitstreamBuffer() in advance in order to |
231 // keep getting more bitstreams from the client, and throttle them by using | 227 // keep getting more bitstreams from the client, and throttle them by using |
232 // |bitstreams_notified_in_advance_|. | 228 // |bitstreams_notified_in_advance_|. |
233 // TODO(dwkang): check if there is a way to remove this workaround. | 229 // TODO(dwkang): check if there is a way to remove this workaround. |
234 base::MessageLoop::current()->PostTask( | 230 base::MessageLoop::current()->PostTask( |
235 FROM_HERE, | 231 FROM_HERE, |
236 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | 232 base::Bind(&AndroidVideoDecodeAcceleratorBase::NotifyEndOfBitstreamBuffer, |
237 weak_this_factory_.GetWeakPtr(), | 233 weak_this_factory_.GetWeakPtr(), |
238 bitstream_buffer.id())); | 234 bitstream_buffer.id())); |
239 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); | 235 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); |
240 } | 236 } |
241 | 237 |
242 void AndroidVideoDecodeAccelerator::DequeueOutput() { | 238 void AndroidVideoDecodeAcceleratorBase::DequeueOutput() { |
243 DCHECK(thread_checker_.CalledOnValidThread()); | 239 DCHECK(thread_checker_.CalledOnValidThread()); |
244 if (picturebuffers_requested_ && output_picture_buffers_.empty()) | 240 if (picturebuffers_requested_ && output_picture_buffers_.empty()) |
245 return; | 241 return; |
246 | 242 |
247 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { | 243 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { |
248 // Don't have any picture buffer to send. Need to wait more. | 244 // Don't have any picture buffer to send. Need to wait more. |
249 return; | 245 return; |
250 } | 246 } |
251 | 247 |
252 bool eos = false; | 248 bool eos = false; |
(...skipping 12 matching lines...) Expand all Loading... | |
265 | 261 |
266 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: { | 262 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: { |
267 int32 width, height; | 263 int32 width, height; |
268 media_codec_->GetOutputFormat(&width, &height); | 264 media_codec_->GetOutputFormat(&width, &height); |
269 | 265 |
270 if (!picturebuffers_requested_) { | 266 if (!picturebuffers_requested_) { |
271 picturebuffers_requested_ = true; | 267 picturebuffers_requested_ = true; |
272 size_ = gfx::Size(width, height); | 268 size_ = gfx::Size(width, height); |
273 base::MessageLoop::current()->PostTask( | 269 base::MessageLoop::current()->PostTask( |
274 FROM_HERE, | 270 FROM_HERE, |
275 base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers, | 271 base::Bind(&AndroidVideoDecodeAcceleratorBase:: |
272 RequestPictureBuffers, | |
276 weak_this_factory_.GetWeakPtr())); | 273 weak_this_factory_.GetWeakPtr())); |
277 } else { | 274 } else { |
278 // Dynamic resolution change support is not specified by the Android | 275 // Dynamic resolution change support is not specified by the Android |
279 // platform at and before JB-MR1, so it's not possible to smoothly | 276 // platform at and before JB-MR1, so it's not possible to smoothly |
280 // continue playback at this point. Instead, error out immediately, | 277 // continue playback at this point. Instead, error out immediately, |
281 // expecting clients to Reset() as appropriate to avoid this. | 278 // expecting clients to Reset() as appropriate to avoid this. |
282 // b/7093648 | 279 // b/7093648 |
283 RETURN_ON_FAILURE(size_ == gfx::Size(width, height), | 280 RETURN_ON_FAILURE(size_ == gfx::Size(width, height), |
284 "Dynamic resolution change is not supported.", | 281 "Dynamic resolution change is not supported.", |
285 PLATFORM_FAILURE); | 282 PLATFORM_FAILURE); |
286 } | 283 } |
287 return; | 284 return; |
288 } | 285 } |
289 | 286 |
290 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: | 287 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: |
291 break; | 288 break; |
292 | 289 |
293 case media::MEDIA_CODEC_OK: | 290 case media::MEDIA_CODEC_OK: |
294 DCHECK_GE(buf_index, 0); | 291 DCHECK_GE(buf_index, 0); |
295 break; | 292 break; |
296 | 293 |
297 default: | 294 default: |
298 NOTREACHED(); | 295 NOTREACHED(); |
299 break; | 296 break; |
300 } | 297 } |
301 } while (buf_index < 0); | 298 } while (buf_index < 0); |
302 | 299 |
303 // This ignores the emitted ByteBuffer and instead relies on rendering to the | |
304 // codec's SurfaceTexture and then copying from that texture to the client's | |
305 // PictureBuffer's texture. This means that each picture's data is written | |
306 // three times: once to the ByteBuffer, once to the SurfaceTexture, and once | |
307 // to the client's texture. It would be nicer to either: | |
308 // 1) Render directly to the client's texture from MediaCodec (one write); or | |
309 // 2) Upload the ByteBuffer to the client's texture (two writes). | |
310 // Unfortunately neither is possible: | |
311 // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture | |
312 // written to can't change during the codec's lifetime. b/11990461 | |
313 // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific, | |
314 // opaque/non-standard format. It's not possible to negotiate the decoder | |
315 // to emit a specific colorspace, even using HW CSC. b/10706245 | |
316 // So, we live with these two extra copies per picture :( | |
317 media_codec_->ReleaseOutputBuffer(buf_index, true); | |
318 | |
319 if (eos) { | 300 if (eos) { |
301 // TODO(liberato): Before refactoring into *Base, this was unconditionally | |
302 // done before the eos check, with render==true. However, since that | |
303 // frame wasn't sent anywhere in the eos case, we now do it here with | |
304 // render==false. We need to see if eos can actually deliver a valid | |
305 // frame with it. | |
306 media_codec_->ReleaseOutputBuffer(buf_index, false); | |
watk
2015/08/28 21:52:05
AFAIK, this is ok and we won't get a valid frame with EOS.
liberato (no reviews please)
2015/09/04 17:59:47
yeah, i think so too. i'll leave the TODO here until we confirm.
| |
320 base::MessageLoop::current()->PostTask( | 307 base::MessageLoop::current()->PostTask( |
321 FROM_HERE, | 308 FROM_HERE, |
322 base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone, | 309 base::Bind(&AndroidVideoDecodeAcceleratorBase::NotifyFlushDone, |
323 weak_this_factory_.GetWeakPtr())); | 310 weak_this_factory_.GetWeakPtr())); |
324 } else { | 311 } else { |
325 int64 bitstream_buffer_id = timestamp.InMicroseconds(); | 312 int64 bitstream_buffer_id = timestamp.InMicroseconds(); |
326 SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id)); | 313 SendCurrentSurfaceToClient(buf_index, |
314 static_cast<int32>(bitstream_buffer_id)); | |
327 | 315 |
328 // Removes ids former or equal than the id from decoder. Note that | 316 // Removes ids former or equal than the id from decoder. Note that |
329 // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder | 317 // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder |
330 // because of frame reordering issue. We just maintain this roughly and use | 318 // because of frame reordering issue. We just maintain this roughly and use |
331 // for the throttling purpose. | 319 // for the throttling purpose. |
332 std::list<int32>::iterator it; | 320 std::list<int32>::iterator it; |
333 for (it = bitstreams_notified_in_advance_.begin(); | 321 for (it = bitstreams_notified_in_advance_.begin(); |
334 it != bitstreams_notified_in_advance_.end(); | 322 it != bitstreams_notified_in_advance_.end(); |
335 ++it) { | 323 ++it) { |
336 if (*it == bitstream_buffer_id) { | 324 if (*it == bitstream_buffer_id) { |
337 bitstreams_notified_in_advance_.erase( | 325 bitstreams_notified_in_advance_.erase( |
338 bitstreams_notified_in_advance_.begin(), ++it); | 326 bitstreams_notified_in_advance_.begin(), ++it); |
339 break; | 327 break; |
340 } | 328 } |
341 } | 329 } |
342 } | 330 } |
343 } | 331 } |
344 | 332 |
345 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient( | 333 void AndroidVideoDecodeAcceleratorBase::SendCurrentSurfaceToClient( |
334 int32 codec_buf_index, | |
watk
2015/08/28 21:52:05
Slight naming inconsistency with the header: codec
liberato (no reviews please)
2015/09/04 17:59:47
thanks.
| |
346 int32 bitstream_id) { | 335 int32 bitstream_id) { |
347 DCHECK(thread_checker_.CalledOnValidThread()); | 336 DCHECK(thread_checker_.CalledOnValidThread()); |
348 DCHECK_NE(bitstream_id, -1); | 337 DCHECK_NE(bitstream_id, -1); |
349 DCHECK(!free_picture_ids_.empty()); | 338 DCHECK(!free_picture_ids_.empty()); |
350 | 339 |
351 RETURN_ON_FAILURE(make_context_current_.Run(), | 340 RETURN_ON_FAILURE(make_context_current_.Run(), |
352 "Failed to make this decoder's GL context current.", | 341 "Failed to make this decoder's GL context current.", |
353 PLATFORM_FAILURE); | 342 PLATFORM_FAILURE); |
354 | 343 |
355 int32 picture_buffer_id = free_picture_ids_.front(); | 344 int32 picture_buffer_id = free_picture_ids_.front(); |
356 free_picture_ids_.pop(); | 345 free_picture_ids_.pop(); |
357 | 346 |
358 float transfrom_matrix[16]; | |
359 surface_texture_->UpdateTexImage(); | |
360 surface_texture_->GetTransformMatrix(transfrom_matrix); | |
361 | |
362 OutputBufferMap::const_iterator i = | 347 OutputBufferMap::const_iterator i = |
363 output_picture_buffers_.find(picture_buffer_id); | 348 output_picture_buffers_.find(picture_buffer_id); |
364 RETURN_ON_FAILURE(i != output_picture_buffers_.end(), | 349 RETURN_ON_FAILURE(i != output_picture_buffers_.end(), |
365 "Can't find a PictureBuffer for " << picture_buffer_id, | 350 "Can't find a PictureBuffer for " << picture_buffer_id, |
366 PLATFORM_FAILURE); | 351 PLATFORM_FAILURE); |
367 uint32 picture_buffer_texture_id = i->second.texture_id(); | |
368 | 352 |
369 RETURN_ON_FAILURE(gl_decoder_.get(), | 353 // Make sure that the decoder is available for AssignCurrent*(). |
354 RETURN_ON_FAILURE(GetGlDecoder(), | |
370 "Failed to get gles2 decoder instance.", | 355 "Failed to get gles2 decoder instance.", |
371 ILLEGAL_STATE); | 356 ILLEGAL_STATE); |
watk
2015/08/28 21:52:05
Should this check be done inside AssignCurrent? Th
liberato (no reviews please)
2015/09/04 17:59:47
yeah, i went back and forth on this one. i kept i
| |
372 // Defer initializing the CopyTextureCHROMIUMResourceManager until it is | |
373 // needed because it takes 10s of milliseconds to initialize. | |
374 if (!copier_) { | |
375 copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager()); | |
376 copier_->Initialize(gl_decoder_.get()); | |
377 } | |
378 | 357 |
379 // Here, we copy |surface_texture_id_| to the picture buffer instead of | 358 AssignCurrentSurfaceToPictureBuffer(codec_buf_index, i->second); |
380 // setting new texture to |surface_texture_| by calling attachToGLContext() | |
381 // because: | |
382 // 1. Once we call detachFrameGLContext(), it deletes the texture previous | |
383 // attached. | |
384 // 2. SurfaceTexture requires us to apply a transform matrix when we show | |
385 // the texture. | |
386 // TODO(hkuang): get the StreamTexture transform matrix in GPU process | |
387 // instead of using default matrix crbug.com/226218. | |
388 const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f, | |
389 0.0f, 1.0f, 0.0f, 0.0f, | |
390 0.0f, 0.0f, 1.0f, 0.0f, | |
391 0.0f, 0.0f, 0.0f, 1.0f}; | |
392 copier_->DoCopyTextureWithTransform(gl_decoder_.get(), | |
393 GL_TEXTURE_EXTERNAL_OES, | |
394 surface_texture_id_, | |
395 picture_buffer_texture_id, | |
396 size_.width(), | |
397 size_.height(), | |
398 false, | |
399 false, | |
400 false, | |
401 default_matrix); | |
402 | 359 |
403 // TODO(henryhsu): Pass (0, 0) as visible size will cause several test | 360 // TODO(henryhsu): Pass (0, 0) as visible size will cause several test |
404 // cases failed. We should make sure |size_| is coded size or visible size. | 361 // cases failed. We should make sure |size_| is coded size or visible size. |
405 base::MessageLoop::current()->PostTask( | 362 base::MessageLoop::current()->PostTask( |
406 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady, | 363 FROM_HERE, |
407 weak_this_factory_.GetWeakPtr(), | 364 base::Bind(&AndroidVideoDecodeAcceleratorBase::NotifyPictureReady, |
408 media::Picture(picture_buffer_id, bitstream_id, | 365 weak_this_factory_.GetWeakPtr(), |
409 gfx::Rect(size_), false))); | 366 media::Picture(picture_buffer_id, bitstream_id, |
367 gfx::Rect(size_), false))); | |
410 } | 368 } |
411 | 369 |
412 void AndroidVideoDecodeAccelerator::Decode( | 370 void AndroidVideoDecodeAcceleratorBase::Decode( |
413 const media::BitstreamBuffer& bitstream_buffer) { | 371 const media::BitstreamBuffer& bitstream_buffer) { |
414 DCHECK(thread_checker_.CalledOnValidThread()); | 372 DCHECK(thread_checker_.CalledOnValidThread()); |
415 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) { | 373 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) { |
416 base::MessageLoop::current()->PostTask( | 374 base::MessageLoop::current()->PostTask( |
417 FROM_HERE, | 375 FROM_HERE, |
418 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | 376 base::Bind(&AndroidVideoDecodeAcceleratorBase:: |
377 NotifyEndOfBitstreamBuffer, | |
419 weak_this_factory_.GetWeakPtr(), | 378 weak_this_factory_.GetWeakPtr(), |
420 bitstream_buffer.id())); | 379 bitstream_buffer.id())); |
421 return; | 380 return; |
422 } | 381 } |
423 | 382 |
424 pending_bitstream_buffers_.push( | 383 pending_bitstream_buffers_.push( |
425 std::make_pair(bitstream_buffer, base::Time::Now())); | 384 std::make_pair(bitstream_buffer, base::Time::Now())); |
426 | 385 |
427 DoIOTask(); | 386 DoIOTask(); |
428 } | 387 } |
429 | 388 |
430 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( | 389 void AndroidVideoDecodeAcceleratorBase::RequestPictureBuffers() { |
390 client_->ProvidePictureBuffers(GetNumPictureBuffers(), size_, | |
391 GetTextureTarget()); | |
392 } | |
393 | |
394 void AndroidVideoDecodeAcceleratorBase::AssignPictureBuffers( | |
431 const std::vector<media::PictureBuffer>& buffers) { | 395 const std::vector<media::PictureBuffer>& buffers) { |
432 DCHECK(thread_checker_.CalledOnValidThread()); | 396 DCHECK(thread_checker_.CalledOnValidThread()); |
433 DCHECK(output_picture_buffers_.empty()); | 397 DCHECK(output_picture_buffers_.empty()); |
434 DCHECK(free_picture_ids_.empty()); | 398 DCHECK(free_picture_ids_.empty()); |
435 | 399 |
436 for (size_t i = 0; i < buffers.size(); ++i) { | 400 for (size_t i = 0; i < buffers.size(); ++i) { |
437 RETURN_ON_FAILURE(buffers[i].size() == size_, | 401 RETURN_ON_FAILURE(buffers[i].size() == size_, |
438 "Invalid picture buffer size was passed.", | 402 "Invalid picture buffer size was passed.", |
439 INVALID_ARGUMENT); | 403 INVALID_ARGUMENT); |
440 int32 id = buffers[i].id(); | 404 int32 id = buffers[i].id(); |
441 output_picture_buffers_.insert(std::make_pair(id, buffers[i])); | 405 output_picture_buffers_.insert(std::make_pair(id, buffers[i])); |
442 free_picture_ids_.push(id); | 406 free_picture_ids_.push(id); |
443 // Since the client might be re-using |picture_buffer_id| values, forget | 407 // Since the client might be re-using |picture_buffer_id| values, forget |
444 // about previously-dismissed IDs now. See ReusePictureBuffer() comment | 408 // about previously-dismissed IDs now. See ReusePictureBuffer() comment |
445 // about "zombies" for why we maintain this set in the first place. | 409 // about "zombies" for why we maintain this set in the first place. |
446 dismissed_picture_ids_.erase(id); | 410 dismissed_picture_ids_.erase(id); |
447 } | 411 } |
448 | 412 |
449 RETURN_ON_FAILURE(output_picture_buffers_.size() >= kNumPictureBuffers, | 413 RETURN_ON_FAILURE(output_picture_buffers_.size() >= GetNumPictureBuffers(), |
450 "Invalid picture buffers were passed.", | 414 "Invalid picture buffers were passed.", |
451 INVALID_ARGUMENT); | 415 INVALID_ARGUMENT); |
452 | 416 |
453 DoIOTask(); | 417 DoIOTask(); |
454 } | 418 } |
455 | 419 |
456 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( | 420 void AndroidVideoDecodeAcceleratorBase::ReusePictureBuffer( |
457 int32 picture_buffer_id) { | 421 int32 picture_buffer_id) { |
458 DCHECK(thread_checker_.CalledOnValidThread()); | 422 DCHECK(thread_checker_.CalledOnValidThread()); |
459 | 423 |
460 // This ReusePictureBuffer() might have been in a pipe somewhere (queued in | 424 // This ReusePictureBuffer() might have been in a pipe somewhere (queued in |
461 // IPC, or in a PostTask either at the sender or receiver) when we sent a | 425 // IPC, or in a PostTask either at the sender or receiver) when we sent a |
462 // DismissPictureBuffer() for this |picture_buffer_id|. Account for such | 426 // DismissPictureBuffer() for this |picture_buffer_id|. Account for such |
463 // potential "zombie" IDs here. | 427 // potential "zombie" IDs here. |
464 if (dismissed_picture_ids_.erase(picture_buffer_id)) | 428 if (dismissed_picture_ids_.erase(picture_buffer_id)) |
465 return; | 429 return; |
466 | 430 |
467 free_picture_ids_.push(picture_buffer_id); | 431 free_picture_ids_.push(picture_buffer_id); |
468 | 432 |
469 DoIOTask(); | 433 DoIOTask(); |
470 } | 434 } |
471 | 435 |
472 void AndroidVideoDecodeAccelerator::Flush() { | 436 void AndroidVideoDecodeAcceleratorBase::Flush() { |
473 DCHECK(thread_checker_.CalledOnValidThread()); | 437 DCHECK(thread_checker_.CalledOnValidThread()); |
474 | 438 |
475 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); | 439 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); |
476 } | 440 } |
477 | 441 |
478 bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() { | 442 bool AndroidVideoDecodeAcceleratorBase::ConfigureMediaCodec() { |
479 DCHECK(thread_checker_.CalledOnValidThread()); | 443 DCHECK(thread_checker_.CalledOnValidThread()); |
480 DCHECK(surface_texture_.get()); | 444 DCHECK(surface_texture_.get()); |
481 | 445 |
482 gfx::ScopedJavaSurface surface(surface_texture_.get()); | 446 gfx::ScopedJavaSurface surface(surface_texture_.get()); |
483 | 447 |
484 // Pass a dummy 320x240 canvas size and let the codec signal the real size | 448 // Pass a dummy 320x240 canvas size and let the codec signal the real size |
485 // when it's known from the bitstream. | 449 // when it's known from the bitstream. |
486 media_codec_.reset(media::VideoCodecBridge::CreateDecoder( | 450 media_codec_.reset(media::VideoCodecBridge::CreateDecoder( |
487 codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL)); | 451 codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL)); |
488 if (!media_codec_) | 452 if (!media_codec_) |
489 return false; | 453 return false; |
490 | 454 |
491 io_timer_.Start(FROM_HERE, | 455 io_timer_.Start(FROM_HERE, |
492 DecodePollDelay(), | 456 DecodePollDelay(), |
493 this, | 457 this, |
494 &AndroidVideoDecodeAccelerator::DoIOTask); | 458 &AndroidVideoDecodeAcceleratorBase::DoIOTask); |
495 return true; | 459 return true; |
496 } | 460 } |
497 | 461 |
498 void AndroidVideoDecodeAccelerator::Reset() { | 462 void AndroidVideoDecodeAcceleratorBase::Reset() { |
499 DCHECK(thread_checker_.CalledOnValidThread()); | 463 DCHECK(thread_checker_.CalledOnValidThread()); |
500 | 464 |
501 while (!pending_bitstream_buffers_.empty()) { | 465 while (!pending_bitstream_buffers_.empty()) { |
502 int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id(); | 466 int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id(); |
503 pending_bitstream_buffers_.pop(); | 467 pending_bitstream_buffers_.pop(); |
504 | 468 |
505 if (bitstream_buffer_id != -1) { | 469 if (bitstream_buffer_id != -1) { |
506 base::MessageLoop::current()->PostTask( | 470 base::MessageLoop::current()->PostTask( |
507 FROM_HERE, | 471 FROM_HERE, |
508 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | 472 base::Bind(&AndroidVideoDecodeAcceleratorBase:: |
473 NotifyEndOfBitstreamBuffer, | |
509 weak_this_factory_.GetWeakPtr(), | 474 weak_this_factory_.GetWeakPtr(), |
510 bitstream_buffer_id)); | 475 bitstream_buffer_id)); |
511 } | 476 } |
512 } | 477 } |
513 bitstreams_notified_in_advance_.clear(); | 478 bitstreams_notified_in_advance_.clear(); |
514 | 479 |
515 for (OutputBufferMap::iterator it = output_picture_buffers_.begin(); | 480 for (OutputBufferMap::iterator it = output_picture_buffers_.begin(); |
516 it != output_picture_buffers_.end(); | 481 it != output_picture_buffers_.end(); |
517 ++it) { | 482 ++it) { |
518 client_->DismissPictureBuffer(it->first); | 483 client_->DismissPictureBuffer(it->first); |
519 dismissed_picture_ids_.insert(it->first); | 484 dismissed_picture_ids_.insert(it->first); |
520 } | 485 } |
521 output_picture_buffers_.clear(); | 486 output_picture_buffers_.clear(); |
522 std::queue<int32> empty; | 487 std::queue<int32> empty; |
523 std::swap(free_picture_ids_, empty); | 488 std::swap(free_picture_ids_, empty); |
524 CHECK(free_picture_ids_.empty()); | 489 CHECK(free_picture_ids_.empty()); |
525 picturebuffers_requested_ = false; | 490 picturebuffers_requested_ = false; |
526 | 491 |
527 // On some devices, and up to at least JB-MR1, | 492 // On some devices, and up to at least JB-MR1, |
528 // - flush() can fail after EOS (b/8125974); and | 493 // - flush() can fail after EOS (b/8125974); and |
529 // - mid-stream resolution change is unsupported (b/7093648). | 494 // - mid-stream resolution change is unsupported (b/7093648). |
530 // To cope with these facts, we always stop & restart the codec on Reset(). | 495 // To cope with these facts, we always stop & restart the codec on Reset(). |
531 io_timer_.Stop(); | 496 io_timer_.Stop(); |
532 media_codec_->Stop(); | 497 media_codec_->Stop(); |
533 ConfigureMediaCodec(); | 498 ConfigureMediaCodec(); |
534 state_ = NO_ERROR; | 499 state_ = NO_ERROR; |
535 | 500 |
536 base::MessageLoop::current()->PostTask( | 501 base::MessageLoop::current()->PostTask( |
537 FROM_HERE, | 502 FROM_HERE, |
538 base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone, | 503 base::Bind(&AndroidVideoDecodeAcceleratorBase::NotifyResetDone, |
539 weak_this_factory_.GetWeakPtr())); | 504 weak_this_factory_.GetWeakPtr())); |
540 } | 505 } |
541 | 506 |
542 void AndroidVideoDecodeAccelerator::Destroy() { | 507 void AndroidVideoDecodeAcceleratorBase::Destroy() { |
543 DCHECK(thread_checker_.CalledOnValidThread()); | 508 DCHECK(thread_checker_.CalledOnValidThread()); |
544 | 509 |
545 weak_this_factory_.InvalidateWeakPtrs(); | 510 weak_this_factory_.InvalidateWeakPtrs(); |
546 if (media_codec_) { | 511 if (media_codec_) { |
547 io_timer_.Stop(); | 512 io_timer_.Stop(); |
548 media_codec_->Stop(); | 513 media_codec_->Stop(); |
549 } | 514 } |
550 if (surface_texture_id_) | 515 if (surface_texture_id_) |
551 glDeleteTextures(1, &surface_texture_id_); | 516 glDeleteTextures(1, &surface_texture_id_); |
552 if (copier_) | |
553 copier_->Destroy(); | |
554 delete this; | 517 delete this; |
555 } | 518 } |
556 | 519 |
557 bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() { | 520 bool AndroidVideoDecodeAcceleratorBase::CanDecodeOnIOThread() { |
558 return false; | 521 return false; |
559 } | 522 } |
560 | 523 |
561 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { | 524 void AndroidVideoDecodeAcceleratorBase::NotifyPictureReady( |
562 client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D); | |
563 } | |
564 | |
565 void AndroidVideoDecodeAccelerator::NotifyPictureReady( | |
566 const media::Picture& picture) { | 525 const media::Picture& picture) { |
567 client_->PictureReady(picture); | 526 client_->PictureReady(picture); |
568 } | 527 } |
569 | 528 |
570 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( | 529 void AndroidVideoDecodeAcceleratorBase::NotifyEndOfBitstreamBuffer( |
571 int input_buffer_id) { | 530 int input_buffer_id) { |
572 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | 531 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); |
573 } | 532 } |
574 | 533 |
575 void AndroidVideoDecodeAccelerator::NotifyFlushDone() { | 534 void AndroidVideoDecodeAcceleratorBase::NotifyFlushDone() { |
576 client_->NotifyFlushDone(); | 535 client_->NotifyFlushDone(); |
577 } | 536 } |
578 | 537 |
579 void AndroidVideoDecodeAccelerator::NotifyResetDone() { | 538 void AndroidVideoDecodeAcceleratorBase::NotifyResetDone() { |
580 client_->NotifyResetDone(); | 539 client_->NotifyResetDone(); |
581 } | 540 } |
582 | 541 |
583 void AndroidVideoDecodeAccelerator::NotifyError( | 542 void AndroidVideoDecodeAcceleratorBase::NotifyError( |
584 media::VideoDecodeAccelerator::Error error) { | 543 media::VideoDecodeAccelerator::Error error) { |
585 client_->NotifyError(error); | 544 client_->NotifyError(error); |
586 } | 545 } |
587 | 546 |
547 media::VideoDecodeAccelerator::Client* | |
548 AndroidVideoDecodeAcceleratorBase::GetClient() const { | |
watk
2015/08/28 21:52:05
This and the following aren't in declaration order
liberato (no reviews please)
2015/09/04 17:59:47
i think that several functions were out of order b
| |
549 return client_; | |
550 } | |
551 | |
552 const gfx::Size& AndroidVideoDecodeAcceleratorBase::GetSize() const { | |
553 return size_; | |
554 } | |
555 | |
556 const base::ThreadChecker& | |
557 AndroidVideoDecodeAcceleratorBase::ThreadChecker() const { | |
558 return thread_checker_; | |
559 } | |
560 | |
561 gfx::SurfaceTexture* | |
562 AndroidVideoDecodeAcceleratorBase::GetSurfaceTexture() const { | |
563 return surface_texture_.get(); | |
564 } | |
565 | |
566 uint32 AndroidVideoDecodeAcceleratorBase::GetSurfaceTextureId() const { | |
567 return surface_texture_id_; | |
568 } | |
569 | |
570 gpu::gles2::GLES2Decoder* | |
571 AndroidVideoDecodeAcceleratorBase::GetGlDecoder() const { | |
572 return gl_decoder_.get(); | |
573 } | |
574 | |
575 media::VideoCodecBridge* AndroidVideoDecodeAcceleratorBase::GetMediaCodec() { | |
576 return media_codec_.get(); | |
577 } | |
578 | |
588 // static | 579 // static |
589 media::VideoDecodeAccelerator::SupportedProfiles | 580 media::VideoDecodeAccelerator::SupportedProfiles |
590 AndroidVideoDecodeAccelerator::GetSupportedProfiles() { | 581 AndroidVideoDecodeAcceleratorBase::GetSupportedProfiles() { |
591 SupportedProfiles profiles; | 582 SupportedProfiles profiles; |
592 | 583 |
593 if (!media::VideoCodecBridge::IsKnownUnaccelerated( | 584 if (!media::VideoCodecBridge::IsKnownUnaccelerated( |
594 media::kCodecVP8, media::MEDIA_CODEC_DECODER)) { | 585 media::kCodecVP8, media::MEDIA_CODEC_DECODER)) { |
595 SupportedProfile profile; | 586 SupportedProfile profile; |
596 profile.profile = media::VP8PROFILE_ANY; | 587 profile.profile = media::VP8PROFILE_ANY; |
597 profile.min_resolution.SetSize(0, 0); | 588 profile.min_resolution.SetSize(0, 0); |
598 profile.max_resolution.SetSize(1920, 1088); | 589 profile.max_resolution.SetSize(1920, 1088); |
599 profiles.push_back(profile); | 590 profiles.push_back(profile); |
600 } | 591 } |
(...skipping 17 matching lines...) Expand all Loading... | |
618 // software fallback for H264 on Android anyway. | 609 // software fallback for H264 on Android anyway. |
619 profile.max_resolution.SetSize(3840, 2160); | 610 profile.max_resolution.SetSize(3840, 2160); |
620 profiles.push_back(profile); | 611 profiles.push_back(profile); |
621 } | 612 } |
622 #endif | 613 #endif |
623 | 614 |
624 return profiles; | 615 return profiles; |
625 } | 616 } |
626 | 617 |
627 } // namespace content | 618 } // namespace content |
OLD | NEW |