OLD | NEW |
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/gpu/media/android_video_decode_accelerator.h" | 5 #include "content/common/gpu/media/android_video_decode_accelerator_impl.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/logging.h" | 8 #include "base/logging.h" |
9 #include "base/message_loop/message_loop.h" | 9 #include "base/message_loop/message_loop.h" |
10 #include "base/metrics/histogram.h" | 10 #include "base/metrics/histogram.h" |
11 #include "content/common/gpu/gpu_channel.h" | 11 #include "content/common/gpu/gpu_channel.h" |
| 12 #include "content/common/gpu/media/avda_return_on_failure.h" |
12 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" | 13 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" |
13 #include "media/base/bitstream_buffer.h" | 14 #include "media/base/bitstream_buffer.h" |
14 #include "media/base/limits.h" | 15 #include "media/base/limits.h" |
15 #include "media/base/video_decoder_config.h" | 16 #include "media/base/video_decoder_config.h" |
16 #include "media/video/picture.h" | 17 #include "media/video/picture.h" |
17 #include "ui/gl/android/scoped_java_surface.h" | 18 #include "ui/gl/android/scoped_java_surface.h" |
18 #include "ui/gl/android/surface_texture.h" | 19 #include "ui/gl/android/surface_texture.h" |
19 #include "ui/gl/gl_bindings.h" | 20 #include "ui/gl/gl_bindings.h" |
20 | 21 |
21 namespace content { | 22 namespace content { |
22 | 23 |
23 // Helper macros for dealing with failure. If |result| evaluates false, emit | 24 AndroidVideoDecodeAccelerator* AndroidVideoDecodeAccelerator::Create( |
24 // |log| to ERROR, register |error| with the decoder, and return. | 25 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder, |
25 #define RETURN_ON_FAILURE(result, log, error) \ | 26 const base::Callback<bool(void)>& make_context_current, |
26 do { \ | 27 scoped_refptr<AndroidVideoDecodeAccelerator::BackingStrategy> strategy) |
27 if (!(result)) { \ | 28 { |
28 DLOG(ERROR) << log; \ | 29 return new AndroidVideoDecodeAcceleratorImpl(decoder, |
29 base::MessageLoop::current()->PostTask( \ | 30 make_context_current, strategy); |
30 FROM_HERE, \ | 31 } |
31 base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, \ | |
32 weak_this_factory_.GetWeakPtr(), \ | |
33 error)); \ | |
34 state_ = ERROR; \ | |
35 return; \ | |
36 } \ | |
37 } while (0) | |
38 | |
39 // TODO(dwkang): We only need kMaxVideoFrames to pass media stack's prerolling | |
40 // phase, but 1 is added due to crbug.com/176036. This should be tuned when we | |
41 // have actual use case. | |
42 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 }; | |
43 | 32 |
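The failure-handling macro deleted above now lives in content/common/gpu/media/avda_return_on_failure.h (included at new line 12) and takes the error-reporting object as an explicit first argument; the call sites below use RETURN_ON_FAILURE(this, ...). Its exact contents are not part of this diff, but given the new PostError() method it presumably reduces to something like the following sketch (formatting and details assumed):

  #define RETURN_ON_FAILURE(provider, result, log, error) \
    do {                                                   \
      if (!(result)) {                                     \
        DLOG(ERROR) << log;                                \
        /* PostError() registers the error and flips the decoder to ERROR. */ \
        provider->PostError(FROM_HERE, error);             \
        return;                                            \
      }                                                    \
    } while (0)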
44 // Max number of bitstreams notified to the client with | 33 // Max number of bitstreams notified to the client with |
45 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. | 34 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. |
46 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; | 35 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; |
47 | 36 |
48 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) | 37 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) |
49 // MediaCodec is only guaranteed to support baseline, but some devices may | 38 // MediaCodec is only guaranteed to support baseline, but some devices may |
50 // support others. Advertise support for all H264 profiles and let the | 39 // support others. Advertise support for all H264 profiles and let the |
51 // MediaCodec fail when decoding if it's not actually supported. It's assumed | 40 // MediaCodec fail when decoding if it's not actually supported. It's assumed |
52 // that consumers won't have software fallback for H264 on Android anyway. | 41 // that consumers won't have software fallback for H264 on Android anyway. |
(...skipping 27 matching lines...) |
80 // pictures have been fed to saturate any internal buffering). This is | 69 // pictures have been fed to saturate any internal buffering). This is |
81 // speculative and it's unclear that this would be a win (nor that there's a | 70 // speculative and it's unclear that this would be a win (nor that there's a |
82 // reasonably device-agnostic way to fill in the "believes" above). | 71 // reasonably device-agnostic way to fill in the "believes" above). |
83 return base::TimeDelta::FromMilliseconds(10); | 72 return base::TimeDelta::FromMilliseconds(10); |
84 } | 73 } |
85 | 74 |
86 static inline const base::TimeDelta NoWaitTimeOut() { | 75 static inline const base::TimeDelta NoWaitTimeOut() { |
87 return base::TimeDelta::FromMicroseconds(0); | 76 return base::TimeDelta::FromMicroseconds(0); |
88 } | 77 } |
89 | 78 |
90 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( | 79 AndroidVideoDecodeAcceleratorImpl::AndroidVideoDecodeAcceleratorImpl( |
91 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder, | 80 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder, |
92 const base::Callback<bool(void)>& make_context_current) | 81 const base::Callback<bool(void)>& make_context_current, |
| 82 scoped_refptr<BackingStrategy> strategy) |
93 : client_(NULL), | 83 : client_(NULL), |
94 make_context_current_(make_context_current), | 84 make_context_current_(make_context_current), |
95 codec_(media::kCodecH264), | 85 codec_(media::kCodecH264), |
96 state_(NO_ERROR), | 86 state_(NO_ERROR), |
97 surface_texture_id_(0), | 87 surface_texture_id_(0), |
98 picturebuffers_requested_(false), | 88 picturebuffers_requested_(false), |
99 gl_decoder_(decoder), | 89 gl_decoder_(decoder), |
| 90 strategy_(strategy), |
100 weak_this_factory_(this) {} | 91 weak_this_factory_(this) {} |
101 | 92 |
102 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { | 93 AndroidVideoDecodeAcceleratorImpl::~AndroidVideoDecodeAcceleratorImpl() { |
103 DCHECK(thread_checker_.CalledOnValidThread()); | 94 DCHECK(thread_checker_.CalledOnValidThread()); |
104 } | 95 } |
105 | 96 |
106 bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, | 97 bool AndroidVideoDecodeAcceleratorImpl::Initialize( |
107 Client* client) { | 98 media::VideoCodecProfile profile, |
| 99 Client* client) { |
108 DCHECK(!media_codec_); | 100 DCHECK(!media_codec_); |
109 DCHECK(thread_checker_.CalledOnValidThread()); | 101 DCHECK(thread_checker_.CalledOnValidThread()); |
110 | 102 |
111 client_ = client; | 103 client_ = client; |
112 codec_ = VideoCodecProfileToVideoCodec(profile); | 104 codec_ = VideoCodecProfileToVideoCodec(profile); |
113 | 105 |
| 106 strategy_->SetStateProvider(this); |
| 107 |
114 bool profile_supported = codec_ == media::kCodecVP8; | 108 bool profile_supported = codec_ == media::kCodecVP8; |
115 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) | 109 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) |
116 profile_supported |= | 110 profile_supported |= |
117 (codec_ == media::kCodecVP9 || codec_ == media::kCodecH264); | 111 (codec_ == media::kCodecVP9 || codec_ == media::kCodecH264); |
118 #endif | 112 #endif |
119 | 113 |
120 if (!profile_supported) { | 114 if (!profile_supported) { |
121 LOG(ERROR) << "Unsupported profile: " << profile; | 115 LOG(ERROR) << "Unsupported profile: " << profile; |
122 return false; | 116 return false; |
123 } | 117 } |
(...skipping 33 matching lines...) |
157 surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_); | 151 surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_); |
158 | 152 |
159 if (!ConfigureMediaCodec()) { | 153 if (!ConfigureMediaCodec()) { |
160 LOG(ERROR) << "Failed to create MediaCodec instance."; | 154 LOG(ERROR) << "Failed to create MediaCodec instance."; |
161 return false; | 155 return false; |
162 } | 156 } |
163 | 157 |
164 return true; | 158 return true; |
165 } | 159 } |
166 | 160 |
167 void AndroidVideoDecodeAccelerator::DoIOTask() { | 161 void AndroidVideoDecodeAcceleratorImpl::DoIOTask() { |
168 DCHECK(thread_checker_.CalledOnValidThread()); | 162 DCHECK(thread_checker_.CalledOnValidThread()); |
169 if (state_ == ERROR) { | 163 if (state_ == ERROR) { |
170 return; | 164 return; |
171 } | 165 } |
172 | 166 |
173 QueueInput(); | 167 QueueInput(); |
174 DequeueOutput(); | 168 DequeueOutput(); |
175 } | 169 } |
176 | 170 |
177 void AndroidVideoDecodeAccelerator::QueueInput() { | 171 void AndroidVideoDecodeAcceleratorImpl::QueueInput() { |
178 DCHECK(thread_checker_.CalledOnValidThread()); | 172 DCHECK(thread_checker_.CalledOnValidThread()); |
179 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance) | 173 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance) |
180 return; | 174 return; |
181 if (pending_bitstream_buffers_.empty()) | 175 if (pending_bitstream_buffers_.empty()) |
182 return; | 176 return; |
183 | 177 |
184 int input_buf_index = 0; | 178 int input_buf_index = 0; |
185 media::MediaCodecStatus status = media_codec_->DequeueInputBuffer( | 179 media::MediaCodecStatus status = media_codec_->DequeueInputBuffer( |
186 NoWaitTimeOut(), &input_buf_index); | 180 NoWaitTimeOut(), &input_buf_index); |
187 if (status != media::MEDIA_CODEC_OK) { | 181 if (status != media::MEDIA_CODEC_OK) { |
(...skipping 16 matching lines...) |
204 | 198 |
205 // Abuse the presentation time argument to propagate the bitstream | 199 // Abuse the presentation time argument to propagate the bitstream |
206 // buffer ID to the output, so we can report it back to the client in | 200 // buffer ID to the output, so we can report it back to the client in |
207 // PictureReady(). | 201 // PictureReady(). |
208 base::TimeDelta timestamp = | 202 base::TimeDelta timestamp = |
209 base::TimeDelta::FromMicroseconds(bitstream_buffer.id()); | 203 base::TimeDelta::FromMicroseconds(bitstream_buffer.id()); |
210 | 204 |
211 scoped_ptr<base::SharedMemory> shm( | 205 scoped_ptr<base::SharedMemory> shm( |
212 new base::SharedMemory(bitstream_buffer.handle(), true)); | 206 new base::SharedMemory(bitstream_buffer.handle(), true)); |
213 | 207 |
214 RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()), | 208 RETURN_ON_FAILURE(this, |
| 209 shm->Map(bitstream_buffer.size()), |
215 "Failed to SharedMemory::Map()", | 210 "Failed to SharedMemory::Map()", |
216 UNREADABLE_INPUT); | 211 UNREADABLE_INPUT); |
217 | 212 |
218 status = | 213 status = |
219 media_codec_->QueueInputBuffer(input_buf_index, | 214 media_codec_->QueueInputBuffer(input_buf_index, |
220 static_cast<const uint8*>(shm->memory()), | 215 static_cast<const uint8*>(shm->memory()), |
221 bitstream_buffer.size(), | 216 bitstream_buffer.size(), |
222 timestamp); | 217 timestamp); |
223 RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, | 218 RETURN_ON_FAILURE(this, |
| 219 status == media::MEDIA_CODEC_OK, |
224 "Failed to QueueInputBuffer: " << status, | 220 "Failed to QueueInputBuffer: " << status, |
225 PLATFORM_FAILURE); | 221 PLATFORM_FAILURE); |
226 | 222 |
227 // We should call NotifyEndOfBitstreamBuffer() when no more decoded output | 223 // We should call NotifyEndOfBitstreamBuffer() when no more decoded output |
228 // will be returned from the bitstream buffer. However, the MediaCodec API | 224 // will be returned from the bitstream buffer. However, the MediaCodec API |
229 // is not sufficient to guarantee this. | 225 // is not sufficient to guarantee this. |
230 // So we call NotifyEndOfBitstreamBuffer() in advance here in order to | 226 // So we call NotifyEndOfBitstreamBuffer() in advance here in order to |
231 // keep getting more bitstreams from the client, and throttle them by using | 227 // keep getting more bitstreams from the client, and throttle them by using |
232 // |bitstreams_notified_in_advance_|. | 228 // |bitstreams_notified_in_advance_|. |
233 // TODO(dwkang): check if there is a way to remove this workaround. | 229 // TODO(dwkang): check if there is a way to remove this workaround. |
234 base::MessageLoop::current()->PostTask( | 230 base::MessageLoop::current()->PostTask( |
235 FROM_HERE, | 231 FROM_HERE, |
236 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | 232 base::Bind(&AndroidVideoDecodeAcceleratorImpl::NotifyEndOfBitstreamBuffer, |
237 weak_this_factory_.GetWeakPtr(), | 233 weak_this_factory_.GetWeakPtr(), |
238 bitstream_buffer.id())); | 234 bitstream_buffer.id())); |
239 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); | 235 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); |
240 } | 236 } |
241 | 237 |
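QueueInput() above relies on two tricks: the bitstream buffer ID rides through MediaCodec in the presentation-timestamp field, and NotifyEndOfBitstreamBuffer() is posted before any output exists, throttled by kMaxBitstreamsNotifiedInAdvance. A condensed view of the ID round trip (illustrative only, no new behavior in this CL):

  // On the input side the buffer ID is encoded as a fake timestamp...
  base::TimeDelta timestamp =
      base::TimeDelta::FromMicroseconds(bitstream_buffer.id());
  // ...and DequeueOutput() later reads the same value back from the decoded
  // frame, so the Picture can be tagged with its originating bitstream buffer.
  int32 bitstream_buffer_id = static_cast<int32>(timestamp.InMicroseconds());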
242 void AndroidVideoDecodeAccelerator::DequeueOutput() { | 238 void AndroidVideoDecodeAcceleratorImpl::DequeueOutput() { |
243 DCHECK(thread_checker_.CalledOnValidThread()); | 239 DCHECK(thread_checker_.CalledOnValidThread()); |
244 if (picturebuffers_requested_ && output_picture_buffers_.empty()) | 240 if (picturebuffers_requested_ && output_picture_buffers_.empty()) |
245 return; | 241 return; |
246 | 242 |
247 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { | 243 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { |
248 // Don't have any picture buffer to send. Need to wait more. | 244 // Don't have any picture buffer to send. Need to wait more. |
249 return; | 245 return; |
250 } | 246 } |
251 | 247 |
252 bool eos = false; | 248 bool eos = false; |
(...skipping 12 matching lines...) |
265 | 261 |
266 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: { | 262 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: { |
267 int32 width, height; | 263 int32 width, height; |
268 media_codec_->GetOutputFormat(&width, &height); | 264 media_codec_->GetOutputFormat(&width, &height); |
269 | 265 |
270 if (!picturebuffers_requested_) { | 266 if (!picturebuffers_requested_) { |
271 picturebuffers_requested_ = true; | 267 picturebuffers_requested_ = true; |
272 size_ = gfx::Size(width, height); | 268 size_ = gfx::Size(width, height); |
273 base::MessageLoop::current()->PostTask( | 269 base::MessageLoop::current()->PostTask( |
274 FROM_HERE, | 270 FROM_HERE, |
275 base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers, | 271 base::Bind(&AndroidVideoDecodeAcceleratorImpl:: |
| 272 RequestPictureBuffers, |
276 weak_this_factory_.GetWeakPtr())); | 273 weak_this_factory_.GetWeakPtr())); |
277 } else { | 274 } else { |
278 // Dynamic resolution change support is not specified by the Android | 275 // Dynamic resolution change support is not specified by the Android |
279 // platform at and before JB-MR1, so it's not possible to smoothly | 276 // platform at and before JB-MR1, so it's not possible to smoothly |
280 // continue playback at this point. Instead, error out immediately, | 277 // continue playback at this point. Instead, error out immediately, |
281 // expecting clients to Reset() as appropriate to avoid this. | 278 // expecting clients to Reset() as appropriate to avoid this. |
282 // b/7093648 | 279 // b/7093648 |
283 RETURN_ON_FAILURE(size_ == gfx::Size(width, height), | 280 RETURN_ON_FAILURE(this, |
| 281 size_ == gfx::Size(width, height), |
284 "Dynamic resolution change is not supported.", | 282 "Dynamic resolution change is not supported.", |
285 PLATFORM_FAILURE); | 283 PLATFORM_FAILURE); |
286 } | 284 } |
287 return; | 285 return; |
288 } | 286 } |
289 | 287 |
290 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: | 288 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: |
291 break; | 289 break; |
292 | 290 |
293 case media::MEDIA_CODEC_OK: | 291 case media::MEDIA_CODEC_OK: |
294 DCHECK_GE(buf_index, 0); | 292 DCHECK_GE(buf_index, 0); |
295 break; | 293 break; |
296 | 294 |
297 default: | 295 default: |
298 NOTREACHED(); | 296 NOTREACHED(); |
299 break; | 297 break; |
300 } | 298 } |
301 } while (buf_index < 0); | 299 } while (buf_index < 0); |
302 | 300 |
303 // This ignores the emitted ByteBuffer and instead relies on rendering to the | |
304 // codec's SurfaceTexture and then copying from that texture to the client's | |
305 // PictureBuffer's texture. This means that each picture's data is written | |
306 // three times: once to the ByteBuffer, once to the SurfaceTexture, and once | |
307 // to the client's texture. It would be nicer to either: | |
308 // 1) Render directly to the client's texture from MediaCodec (one write); or | |
309 // 2) Upload the ByteBuffer to the client's texture (two writes). | |
310 // Unfortunately neither is possible: | |
311 // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture | |
312 // written to can't change during the codec's lifetime. b/11990461 | |
313 // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific, | |
314 // opaque/non-standard format. It's not possible to negotiate the decoder | |
315 // to emit a specific colorspace, even using HW CSC. b/10706245 | |
316 // So, we live with these two extra copies per picture :( | |
317 media_codec_->ReleaseOutputBuffer(buf_index, true); | |
318 | |
319 if (eos) { | 301 if (eos) { |
| 302 media_codec_->ReleaseOutputBuffer(buf_index, false); |
320 base::MessageLoop::current()->PostTask( | 303 base::MessageLoop::current()->PostTask( |
321 FROM_HERE, | 304 FROM_HERE, |
322 base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone, | 305 base::Bind(&AndroidVideoDecodeAcceleratorImpl::NotifyFlushDone, |
323 weak_this_factory_.GetWeakPtr())); | 306 weak_this_factory_.GetWeakPtr())); |
324 } else { | 307 } else { |
325 int64 bitstream_buffer_id = timestamp.InMicroseconds(); | 308 int64 bitstream_buffer_id = timestamp.InMicroseconds(); |
326 SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id)); | 309 SendCurrentSurfaceToClient(buf_index, |
| 310 static_cast<int32>(bitstream_buffer_id)); |
327 | 311 |
328 // Remove ids up to and including this one from the list. Note that | 312 // Remove ids up to and including this one from the list. Note that |
329 // |bitstreams_notified_in_advance_| need not match the ids still in the | 313 // |bitstreams_notified_in_advance_| need not match the ids still in the |
330 // decoder because of frame reordering; we only maintain it roughly and use | 314 // decoder because of frame reordering; we only maintain it roughly and use |
331 // it for throttling purposes. | 315 // it for throttling purposes. |
332 std::list<int32>::iterator it; | 316 std::list<int32>::iterator it; |
333 for (it = bitstreams_notified_in_advance_.begin(); | 317 for (it = bitstreams_notified_in_advance_.begin(); |
334 it != bitstreams_notified_in_advance_.end(); | 318 it != bitstreams_notified_in_advance_.end(); |
335 ++it) { | 319 ++it) { |
336 if (*it == bitstream_buffer_id) { | 320 if (*it == bitstream_buffer_id) { |
337 bitstreams_notified_in_advance_.erase( | 321 bitstreams_notified_in_advance_.erase( |
338 bitstreams_notified_in_advance_.begin(), ++it); | 322 bitstreams_notified_in_advance_.begin(), ++it); |
339 break; | 323 break; |
340 } | 324 } |
341 } | 325 } |
342 } | 326 } |
343 } | 327 } |
344 | 328 |
345 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient( | 329 void AndroidVideoDecodeAcceleratorImpl::SendCurrentSurfaceToClient( |
| 330 int32 codec_buffer_index, |
346 int32 bitstream_id) { | 331 int32 bitstream_id) { |
347 DCHECK(thread_checker_.CalledOnValidThread()); | 332 DCHECK(thread_checker_.CalledOnValidThread()); |
348 DCHECK_NE(bitstream_id, -1); | 333 DCHECK_NE(bitstream_id, -1); |
349 DCHECK(!free_picture_ids_.empty()); | 334 DCHECK(!free_picture_ids_.empty()); |
350 | 335 |
351 RETURN_ON_FAILURE(make_context_current_.Run(), | 336 RETURN_ON_FAILURE(this, |
| 337 make_context_current_.Run(), |
352 "Failed to make this decoder's GL context current.", | 338 "Failed to make this decoder's GL context current.", |
353 PLATFORM_FAILURE); | 339 PLATFORM_FAILURE); |
354 | 340 |
355 int32 picture_buffer_id = free_picture_ids_.front(); | 341 int32 picture_buffer_id = free_picture_ids_.front(); |
356 free_picture_ids_.pop(); | 342 free_picture_ids_.pop(); |
357 | 343 |
358 float transfrom_matrix[16]; | |
359 surface_texture_->UpdateTexImage(); | |
360 surface_texture_->GetTransformMatrix(transfrom_matrix); | |
361 | |
362 OutputBufferMap::const_iterator i = | 344 OutputBufferMap::const_iterator i = |
363 output_picture_buffers_.find(picture_buffer_id); | 345 output_picture_buffers_.find(picture_buffer_id); |
364 RETURN_ON_FAILURE(i != output_picture_buffers_.end(), | 346 RETURN_ON_FAILURE(this, |
| 347 i != output_picture_buffers_.end(), |
365 "Can't find a PictureBuffer for " << picture_buffer_id, | 348 "Can't find a PictureBuffer for " << picture_buffer_id, |
366 PLATFORM_FAILURE); | 349 PLATFORM_FAILURE); |
367 uint32 picture_buffer_texture_id = i->second.texture_id(); | |
368 | 350 |
369 RETURN_ON_FAILURE(gl_decoder_.get(), | 351 // Connect the PictureBuffer to the decoded frame, via whatever |
370 "Failed to get gles2 decoder instance.", | 352 // mechanism the strategy likes. |
371 ILLEGAL_STATE); | 353 strategy_->AssignCurrentSurfaceToPictureBuffer(codec_buffer_index, |
372 // Defer initializing the CopyTextureCHROMIUMResourceManager until it is | 354 i->second); |
373 // needed because it takes 10s of milliseconds to initialize. | |
374 if (!copier_) { | |
375 copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager()); | |
376 copier_->Initialize(gl_decoder_.get()); | |
377 } | |
378 | |
379 // Here, we copy |surface_texture_id_| to the picture buffer instead of | |
380 // setting new texture to |surface_texture_| by calling attachToGLContext() | |
381 // because: | |
382 // 1. Once we call detachFrameGLContext(), it deletes the texture previous | |
383 // attached. | |
384 // 2. SurfaceTexture requires us to apply a transform matrix when we show | |
385 // the texture. | |
386 // TODO(hkuang): get the StreamTexture transform matrix in GPU process | |
387 // instead of using default matrix crbug.com/226218. | |
388 const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f, | |
389 0.0f, 1.0f, 0.0f, 0.0f, | |
390 0.0f, 0.0f, 1.0f, 0.0f, | |
391 0.0f, 0.0f, 0.0f, 1.0f}; | |
392 copier_->DoCopyTextureWithTransform( | |
393 gl_decoder_.get(), GL_TEXTURE_EXTERNAL_OES, surface_texture_id_, | |
394 GL_TEXTURE_2D, picture_buffer_texture_id, GL_RGBA, GL_UNSIGNED_BYTE, | |
395 size_.width(), size_.height(), false, false, false, nullptr, | |
396 default_matrix); | |
397 | 355 |
398 // TODO(henryhsu): Pass (0, 0) as visible size will cause several test | 356 // TODO(henryhsu): Pass (0, 0) as visible size will cause several test |
399 // cases failed. We should make sure |size_| is coded size or visible size. | 357 // cases failed. We should make sure |size_| is coded size or visible size. |
400 base::MessageLoop::current()->PostTask( | 358 base::MessageLoop::current()->PostTask( |
401 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady, | 359 FROM_HERE, |
402 weak_this_factory_.GetWeakPtr(), | 360 base::Bind(&AndroidVideoDecodeAcceleratorImpl::NotifyPictureReady, |
403 media::Picture(picture_buffer_id, bitstream_id, | 361 weak_this_factory_.GetWeakPtr(), |
404 gfx::Rect(size_), false))); | 362 media::Picture(picture_buffer_id, bitstream_id, |
| 363 gfx::Rect(size_), false))); |
405 } | 364 } |
406 | 365 |
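SendCurrentSurfaceToClient() now delegates the actual frame hand-off to the backing strategy instead of copying the SurfaceTexture into the client's texture itself. The BackingStrategy declaration is not part of this file; from the calls made here (SetStateProvider, GetNumPictureBuffers, GetTextureTarget, AssignCurrentSurfaceToPictureBuffer, Cleanup) its shape is roughly the following sketch, where the method names come from the call sites and everything else (ref-counting base, const-ness, parameter types) is an assumption:

  class AndroidVideoDecodeAccelerator::BackingStrategy
      : public base::RefCountedThreadSafe<BackingStrategy> {
   public:
    // Gives the strategy access to decoder state (surface texture, codec, ...).
    virtual void SetStateProvider(StateProvider* state_provider) = 0;
    // How many PictureBuffers to request and which GL target their textures use.
    virtual uint32 GetNumPictureBuffers() const = 0;
    virtual uint32 GetTextureTarget() const = 0;
    // Make the decoded frame in |codec_buffer_index| visible through
    // |picture_buffer|, e.g. by copying or by (re)binding a texture image.
    virtual void AssignCurrentSurfaceToPictureBuffer(
        int32 codec_buffer_index,
        const media::PictureBuffer& picture_buffer) = 0;
    // Release per-strategy resources; called from Destroy().
    virtual void Cleanup() = 0;
  };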
407 void AndroidVideoDecodeAccelerator::Decode( | 366 void AndroidVideoDecodeAcceleratorImpl::Decode( |
408 const media::BitstreamBuffer& bitstream_buffer) { | 367 const media::BitstreamBuffer& bitstream_buffer) { |
409 DCHECK(thread_checker_.CalledOnValidThread()); | 368 DCHECK(thread_checker_.CalledOnValidThread()); |
410 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) { | 369 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) { |
411 base::MessageLoop::current()->PostTask( | 370 base::MessageLoop::current()->PostTask( |
412 FROM_HERE, | 371 FROM_HERE, |
413 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | 372 base::Bind(&AndroidVideoDecodeAcceleratorImpl:: |
| 373 NotifyEndOfBitstreamBuffer, |
414 weak_this_factory_.GetWeakPtr(), | 374 weak_this_factory_.GetWeakPtr(), |
415 bitstream_buffer.id())); | 375 bitstream_buffer.id())); |
416 return; | 376 return; |
417 } | 377 } |
418 | 378 |
419 pending_bitstream_buffers_.push( | 379 pending_bitstream_buffers_.push( |
420 std::make_pair(bitstream_buffer, base::Time::Now())); | 380 std::make_pair(bitstream_buffer, base::Time::Now())); |
421 | 381 |
422 DoIOTask(); | 382 DoIOTask(); |
423 } | 383 } |
424 | 384 |
425 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( | 385 void AndroidVideoDecodeAcceleratorImpl::RequestPictureBuffers() { |
| 386 client_->ProvidePictureBuffers(strategy_->GetNumPictureBuffers(), |
| 387 size_, strategy_->GetTextureTarget()); |
| 388 } |
| 389 |
| 390 void AndroidVideoDecodeAcceleratorImpl::AssignPictureBuffers( |
426 const std::vector<media::PictureBuffer>& buffers) { | 391 const std::vector<media::PictureBuffer>& buffers) { |
427 DCHECK(thread_checker_.CalledOnValidThread()); | 392 DCHECK(thread_checker_.CalledOnValidThread()); |
428 DCHECK(output_picture_buffers_.empty()); | 393 DCHECK(output_picture_buffers_.empty()); |
429 DCHECK(free_picture_ids_.empty()); | 394 DCHECK(free_picture_ids_.empty()); |
430 | 395 |
431 for (size_t i = 0; i < buffers.size(); ++i) { | 396 for (size_t i = 0; i < buffers.size(); ++i) { |
432 RETURN_ON_FAILURE(buffers[i].size() == size_, | 397 RETURN_ON_FAILURE(this, |
| 398 buffers[i].size() == size_, |
433 "Invalid picture buffer size was passed.", | 399 "Invalid picture buffer size was passed.", |
434 INVALID_ARGUMENT); | 400 INVALID_ARGUMENT); |
435 int32 id = buffers[i].id(); | 401 int32 id = buffers[i].id(); |
436 output_picture_buffers_.insert(std::make_pair(id, buffers[i])); | 402 output_picture_buffers_.insert(std::make_pair(id, buffers[i])); |
437 free_picture_ids_.push(id); | 403 free_picture_ids_.push(id); |
438 // Since the client might be re-using |picture_buffer_id| values, forget | 404 // Since the client might be re-using |picture_buffer_id| values, forget |
439 // about previously-dismissed IDs now. See ReusePictureBuffer() comment | 405 // about previously-dismissed IDs now. See ReusePictureBuffer() comment |
440 // about "zombies" for why we maintain this set in the first place. | 406 // about "zombies" for why we maintain this set in the first place. |
441 dismissed_picture_ids_.erase(id); | 407 dismissed_picture_ids_.erase(id); |
442 } | 408 } |
443 | 409 |
444 RETURN_ON_FAILURE(output_picture_buffers_.size() >= kNumPictureBuffers, | 410 RETURN_ON_FAILURE(this, |
| 411 output_picture_buffers_.size() >= |
| 412 strategy_->GetNumPictureBuffers(), |
445 "Invalid picture buffers were passed.", | 413 "Invalid picture buffers were passed.", |
446 INVALID_ARGUMENT); | 414 INVALID_ARGUMENT); |
447 | 415 |
448 DoIOTask(); | 416 DoIOTask(); |
449 } | 417 } |
450 | 418 |
451 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( | 419 void AndroidVideoDecodeAcceleratorImpl::ReusePictureBuffer( |
452 int32 picture_buffer_id) { | 420 int32 picture_buffer_id) { |
453 DCHECK(thread_checker_.CalledOnValidThread()); | 421 DCHECK(thread_checker_.CalledOnValidThread()); |
454 | 422 |
455 // This ReusePictureBuffer() might have been in a pipe somewhere (queued in | 423 // This ReusePictureBuffer() might have been in a pipe somewhere (queued in |
456 // IPC, or in a PostTask either at the sender or receiver) when we sent a | 424 // IPC, or in a PostTask either at the sender or receiver) when we sent a |
457 // DismissPictureBuffer() for this |picture_buffer_id|. Account for such | 425 // DismissPictureBuffer() for this |picture_buffer_id|. Account for such |
458 // potential "zombie" IDs here. | 426 // potential "zombie" IDs here. |
459 if (dismissed_picture_ids_.erase(picture_buffer_id)) | 427 if (dismissed_picture_ids_.erase(picture_buffer_id)) |
460 return; | 428 return; |
461 | 429 |
462 free_picture_ids_.push(picture_buffer_id); | 430 free_picture_ids_.push(picture_buffer_id); |
463 | 431 |
464 DoIOTask(); | 432 DoIOTask(); |
465 } | 433 } |
466 | 434 |
467 void AndroidVideoDecodeAccelerator::Flush() { | 435 void AndroidVideoDecodeAcceleratorImpl::Flush() { |
468 DCHECK(thread_checker_.CalledOnValidThread()); | 436 DCHECK(thread_checker_.CalledOnValidThread()); |
469 | 437 |
470 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); | 438 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); |
471 } | 439 } |
472 | 440 |
473 bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() { | 441 bool AndroidVideoDecodeAcceleratorImpl::ConfigureMediaCodec() { |
474 DCHECK(thread_checker_.CalledOnValidThread()); | 442 DCHECK(thread_checker_.CalledOnValidThread()); |
475 DCHECK(surface_texture_.get()); | 443 DCHECK(surface_texture_.get()); |
476 | 444 |
477 gfx::ScopedJavaSurface surface(surface_texture_.get()); | 445 gfx::ScopedJavaSurface surface(surface_texture_.get()); |
478 | 446 |
479 // Pass a dummy 320x240 canvas size and let the codec signal the real size | 447 // Pass a dummy 320x240 canvas size and let the codec signal the real size |
480 // when it's known from the bitstream. | 448 // when it's known from the bitstream. |
481 media_codec_.reset(media::VideoCodecBridge::CreateDecoder( | 449 media_codec_.reset(media::VideoCodecBridge::CreateDecoder( |
482 codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL)); | 450 codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL)); |
483 if (!media_codec_) | 451 if (!media_codec_) |
484 return false; | 452 return false; |
485 | 453 |
486 io_timer_.Start(FROM_HERE, | 454 io_timer_.Start(FROM_HERE, |
487 DecodePollDelay(), | 455 DecodePollDelay(), |
488 this, | 456 this, |
489 &AndroidVideoDecodeAccelerator::DoIOTask); | 457 &AndroidVideoDecodeAcceleratorImpl::DoIOTask); |
490 return true; | 458 return true; |
491 } | 459 } |
492 | 460 |
493 void AndroidVideoDecodeAccelerator::Reset() { | 461 void AndroidVideoDecodeAcceleratorImpl::Reset() { |
494 DCHECK(thread_checker_.CalledOnValidThread()); | 462 DCHECK(thread_checker_.CalledOnValidThread()); |
495 | 463 |
496 while (!pending_bitstream_buffers_.empty()) { | 464 while (!pending_bitstream_buffers_.empty()) { |
497 int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id(); | 465 int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id(); |
498 pending_bitstream_buffers_.pop(); | 466 pending_bitstream_buffers_.pop(); |
499 | 467 |
500 if (bitstream_buffer_id != -1) { | 468 if (bitstream_buffer_id != -1) { |
501 base::MessageLoop::current()->PostTask( | 469 base::MessageLoop::current()->PostTask( |
502 FROM_HERE, | 470 FROM_HERE, |
503 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | 471 base::Bind(&AndroidVideoDecodeAcceleratorImpl:: |
| 472 NotifyEndOfBitstreamBuffer, |
504 weak_this_factory_.GetWeakPtr(), | 473 weak_this_factory_.GetWeakPtr(), |
505 bitstream_buffer_id)); | 474 bitstream_buffer_id)); |
506 } | 475 } |
507 } | 476 } |
508 bitstreams_notified_in_advance_.clear(); | 477 bitstreams_notified_in_advance_.clear(); |
509 | 478 |
510 for (OutputBufferMap::iterator it = output_picture_buffers_.begin(); | 479 for (OutputBufferMap::iterator it = output_picture_buffers_.begin(); |
511 it != output_picture_buffers_.end(); | 480 it != output_picture_buffers_.end(); |
512 ++it) { | 481 ++it) { |
513 client_->DismissPictureBuffer(it->first); | 482 client_->DismissPictureBuffer(it->first); |
514 dismissed_picture_ids_.insert(it->first); | 483 dismissed_picture_ids_.insert(it->first); |
515 } | 484 } |
516 output_picture_buffers_.clear(); | 485 output_picture_buffers_.clear(); |
517 std::queue<int32> empty; | 486 std::queue<int32> empty; |
518 std::swap(free_picture_ids_, empty); | 487 std::swap(free_picture_ids_, empty); |
519 CHECK(free_picture_ids_.empty()); | 488 CHECK(free_picture_ids_.empty()); |
520 picturebuffers_requested_ = false; | 489 picturebuffers_requested_ = false; |
521 | 490 |
522 // On some devices, and up to at least JB-MR1, | 491 // On some devices, and up to at least JB-MR1, |
523 // - flush() can fail after EOS (b/8125974); and | 492 // - flush() can fail after EOS (b/8125974); and |
524 // - mid-stream resolution change is unsupported (b/7093648). | 493 // - mid-stream resolution change is unsupported (b/7093648). |
525 // To cope with these facts, we always stop & restart the codec on Reset(). | 494 // To cope with these facts, we always stop & restart the codec on Reset(). |
526 io_timer_.Stop(); | 495 io_timer_.Stop(); |
527 media_codec_->Stop(); | 496 media_codec_->Stop(); |
528 ConfigureMediaCodec(); | 497 ConfigureMediaCodec(); |
529 state_ = NO_ERROR; | 498 state_ = NO_ERROR; |
530 | 499 |
531 base::MessageLoop::current()->PostTask( | 500 base::MessageLoop::current()->PostTask( |
532 FROM_HERE, | 501 FROM_HERE, |
533 base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone, | 502 base::Bind(&AndroidVideoDecodeAcceleratorImpl::NotifyResetDone, |
534 weak_this_factory_.GetWeakPtr())); | 503 weak_this_factory_.GetWeakPtr())); |
535 } | 504 } |
536 | 505 |
537 void AndroidVideoDecodeAccelerator::Destroy() { | 506 void AndroidVideoDecodeAcceleratorImpl::Destroy() { |
538 DCHECK(thread_checker_.CalledOnValidThread()); | 507 DCHECK(thread_checker_.CalledOnValidThread()); |
539 | 508 |
| 509 strategy_->Cleanup(); |
| 510 |
540 weak_this_factory_.InvalidateWeakPtrs(); | 511 weak_this_factory_.InvalidateWeakPtrs(); |
541 if (media_codec_) { | 512 if (media_codec_) { |
542 io_timer_.Stop(); | 513 io_timer_.Stop(); |
543 media_codec_->Stop(); | 514 media_codec_->Stop(); |
544 } | 515 } |
545 if (surface_texture_id_) | 516 if (surface_texture_id_) |
546 glDeleteTextures(1, &surface_texture_id_); | 517 glDeleteTextures(1, &surface_texture_id_); |
547 if (copier_) | |
548 copier_->Destroy(); | |
549 delete this; | 518 delete this; |
550 } | 519 } |
551 | 520 |
552 bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() { | 521 bool AndroidVideoDecodeAcceleratorImpl::CanDecodeOnIOThread() { |
553 return false; | 522 return false; |
554 } | 523 } |
555 | 524 |
556 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { | 525 const gfx::Size& AndroidVideoDecodeAcceleratorImpl::GetSize() const { |
557 client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D); | 526 return size_; |
558 } | 527 } |
559 | 528 |
560 void AndroidVideoDecodeAccelerator::NotifyPictureReady( | 529 const base::ThreadChecker& |
| 530 AndroidVideoDecodeAcceleratorImpl::ThreadChecker() const { |
| 531 return thread_checker_; |
| 532 } |
| 533 |
| 534 gfx::SurfaceTexture* |
| 535 AndroidVideoDecodeAcceleratorImpl::GetSurfaceTexture() const { |
| 536 return surface_texture_.get(); |
| 537 } |
| 538 |
| 539 uint32 AndroidVideoDecodeAcceleratorImpl::GetSurfaceTextureId() const { |
| 540 return surface_texture_id_; |
| 541 } |
| 542 |
| 543 gpu::gles2::GLES2Decoder* |
| 544 AndroidVideoDecodeAcceleratorImpl::GetGlDecoder() const { |
| 545 return gl_decoder_.get(); |
| 546 } |
| 547 |
| 548 media::VideoCodecBridge* AndroidVideoDecodeAcceleratorImpl::GetMediaCodec() { |
| 549 return media_codec_.get(); |
| 550 } |
| 551 |
| 552 void AndroidVideoDecodeAcceleratorImpl::PostError( |
| 553 const ::tracked_objects::Location& from_here, |
| 554 media::VideoDecodeAccelerator::Error error) { |
| 555 base::MessageLoop::current()->PostTask(from_here, |
| 556 base::Bind(&AndroidVideoDecodeAcceleratorImpl::NotifyError, |
| 557 weak_this_factory_.GetWeakPtr(), |
| 558 error)); |
| 559 state_ = ERROR; |
| 560 } |
| 561 |
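The accessors above (GetSize() through PostError()) exist so a BackingStrategy can query decoder state through a narrow interface rather than through the concrete Impl class. That interface is declared elsewhere; a plausible reconstruction from the overrides implemented here (the interface name and ownership details are assumptions, not shown in this diff):

  class AndroidVideoDecodeAccelerator::StateProvider {
   public:
    virtual const gfx::Size& GetSize() const = 0;
    virtual const base::ThreadChecker& ThreadChecker() const = 0;
    virtual gfx::SurfaceTexture* GetSurfaceTexture() const = 0;
    virtual uint32 GetSurfaceTextureId() const = 0;
    virtual gpu::gles2::GLES2Decoder* GetGlDecoder() const = 0;
    virtual media::VideoCodecBridge* GetMediaCodec() = 0;
    // Report |error| to the client and move the decoder into the ERROR state.
    virtual void PostError(const ::tracked_objects::Location& from_here,
                           media::VideoDecodeAccelerator::Error error) = 0;

   protected:
    virtual ~StateProvider() {}
  };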
| 562 void AndroidVideoDecodeAcceleratorImpl::NotifyPictureReady( |
561 const media::Picture& picture) { | 563 const media::Picture& picture) { |
562 client_->PictureReady(picture); | 564 client_->PictureReady(picture); |
563 } | 565 } |
564 | 566 |
565 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( | 567 void AndroidVideoDecodeAcceleratorImpl::NotifyEndOfBitstreamBuffer( |
566 int input_buffer_id) { | 568 int input_buffer_id) { |
567 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | 569 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); |
568 } | 570 } |
569 | 571 |
570 void AndroidVideoDecodeAccelerator::NotifyFlushDone() { | 572 void AndroidVideoDecodeAcceleratorImpl::NotifyFlushDone() { |
571 client_->NotifyFlushDone(); | 573 client_->NotifyFlushDone(); |
572 } | 574 } |
573 | 575 |
574 void AndroidVideoDecodeAccelerator::NotifyResetDone() { | 576 void AndroidVideoDecodeAcceleratorImpl::NotifyResetDone() { |
575 client_->NotifyResetDone(); | 577 client_->NotifyResetDone(); |
576 } | 578 } |
577 | 579 |
578 void AndroidVideoDecodeAccelerator::NotifyError( | 580 void AndroidVideoDecodeAcceleratorImpl::NotifyError( |
579 media::VideoDecodeAccelerator::Error error) { | 581 media::VideoDecodeAccelerator::Error error) { |
580 client_->NotifyError(error); | 582 client_->NotifyError(error); |
581 } | 583 } |
582 | 584 |
583 // static | 585 // static |
584 media::VideoDecodeAccelerator::SupportedProfiles | 586 media::VideoDecodeAccelerator::SupportedProfiles |
585 AndroidVideoDecodeAccelerator::GetSupportedProfiles() { | 587 AndroidVideoDecodeAccelerator::GetSupportedProfiles() { |
586 SupportedProfiles profiles; | 588 SupportedProfiles profiles; |
587 | 589 |
588 if (!media::VideoCodecBridge::IsKnownUnaccelerated( | 590 if (!media::VideoCodecBridge::IsKnownUnaccelerated( |
(...skipping 24 matching lines...) |
613 // software fallback for H264 on Android anyway. | 615 // software fallback for H264 on Android anyway. |
614 profile.max_resolution.SetSize(3840, 2160); | 616 profile.max_resolution.SetSize(3840, 2160); |
615 profiles.push_back(profile); | 617 profiles.push_back(profile); |
616 } | 618 } |
617 #endif | 619 #endif |
618 | 620 |
619 return profiles; | 621 return profiles; |
620 } | 622 } |
621 | 623 |
622 } // namespace content | 624 } // namespace content |