Chromium Code Reviews

Unified diff: content/common/gpu/media/android_video_decode_accelerator.cc

Issue 1313913003: Begin refactor of AVDA to support zero-copy. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: add gn build. Created 5 years, 3 months ago
-// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Copyright (c) 2015 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "content/common/gpu/media/android_video_decode_accelerator.h"
 
 #include "base/bind.h"
 #include "base/logging.h"
 #include "base/message_loop/message_loop.h"
 #include "base/metrics/histogram.h"
 #include "content/common/gpu/gpu_channel.h"
+#include "content/common/gpu/media/avda_return_on_failure.h"
 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
 #include "media/base/bitstream_buffer.h"
 #include "media/base/limits.h"
 #include "media/base/timestamp_constants.h"
 #include "media/base/video_decoder_config.h"
 #include "media/video/picture.h"
 #include "ui/gl/android/scoped_java_surface.h"
 #include "ui/gl/android/surface_texture.h"
 #include "ui/gl/gl_bindings.h"
 
 namespace content {
 
-// Helper macros for dealing with failure. If |result| evaluates false, emit
-// |log| to ERROR, register |error| with the decoder, and return.
-#define RETURN_ON_FAILURE(result, log, error)                       \
-  do {                                                              \
-    if (!(result)) {                                                \
-      DLOG(ERROR) << log;                                           \
-      base::MessageLoop::current()->PostTask(                       \
-          FROM_HERE,                                                \
-          base::Bind(&AndroidVideoDecodeAccelerator::NotifyError,   \
-                     weak_this_factory_.GetWeakPtr(),               \
-                     error));                                       \
-      state_ = ERROR;                                               \
-      return;                                                       \
-    }                                                               \
-  } while (0)
-
-// TODO(dwkang): We only need kMaxVideoFrames to pass media stack's prerolling
-// phase, but 1 is added due to crbug.com/176036. This should be tuned when we
-// have actual use case.
-enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };
-
 // Max number of bitstreams notified to the client with
 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream.
 enum { kMaxBitstreamsNotifiedInAdvance = 32 };
 
 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID)
 // MediaCodec is only guaranteed to support baseline, but some devices may
 // support others. Advertise support for all H264 profiles and let the
 // MediaCodec fail when decoding if it's not actually supported. It's assumed
 // that consumers won't have software fallback for H264 on Android anyway.
 static const media::VideoCodecProfile kSupportedH264Profiles[] = {
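
The failure-handling macro deleted in this hunk is not simply gone: the same hunk adds an include of content/common/gpu/media/avda_return_on_failure.h, every later RETURN_ON_FAILURE call site in this file now passes |this| as a new first argument, and a PostError() helper is added near the end of the file. That header is not part of this diff, so the block below is only a sketch of what the relocated macro presumably looks like, reconstructed from the old macro body and the new call sites; the include-guard name, the chosen includes, and the DLOG call are assumptions carried over from the old version.

// Hypothetical reconstruction; the real avda_return_on_failure.h is not in
// this patch set, so details may differ.
#ifndef CONTENT_COMMON_GPU_MEDIA_AVDA_RETURN_ON_FAILURE_H_
#define CONTENT_COMMON_GPU_MEDIA_AVDA_RETURN_ON_FAILURE_H_

#include "base/location.h"
#include "base/logging.h"

// Like the old in-file macro, but usable by the decoder and its backing
// strategies alike: |provider| is any object exposing PostError(), which
// AndroidVideoDecodeAccelerator now implements (asynchronously notify the
// client and latch state_ = ERROR), so the macro itself only checks, logs,
// and returns.
#define RETURN_ON_FAILURE(provider, result, log, error) \
  do {                                                  \
    if (!(result)) {                                    \
      DLOG(ERROR) << log;                               \
      provider->PostError(FROM_HERE, error);            \
      return;                                           \
    }                                                   \
  } while (0)

#endif  // CONTENT_COMMON_GPU_MEDIA_AVDA_RETURN_ON_FAILURE_H_

Routing the failure through provider->PostError() keeps the old behaviour while letting any object that can report an error reuse the macro, which is presumably why the call sites below gain the extra |this| argument.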
(...skipping 28 matching lines...)
   // reasonably device-agnostic way to fill in the "believes" above).
   return base::TimeDelta::FromMilliseconds(10);
 }
 
 static inline const base::TimeDelta NoWaitTimeOut() {
   return base::TimeDelta::FromMicroseconds(0);
 }
 
 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
     const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
-    const base::Callback<bool(void)>& make_context_current)
+    const base::Callback<bool(void)>& make_context_current,
+    scoped_ptr<BackingStrategy> strategy)
     : client_(NULL),
       make_context_current_(make_context_current),
       codec_(media::kCodecH264),
       state_(NO_ERROR),
       surface_texture_id_(0),
       picturebuffers_requested_(false),
       gl_decoder_(decoder),
+      strategy_(strategy.Pass()),
       weak_this_factory_(this) {}
 
 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
   DCHECK(thread_checker_.CalledOnValidThread());
 }
 
 bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
                                                Client* client) {
   DCHECK(!media_codec_);
   DCHECK(thread_checker_.CalledOnValidThread());
 
   client_ = client;
   codec_ = VideoCodecProfileToVideoCodec(profile);
 
+  strategy_->SetStateProvider(this);
+
   bool profile_supported = codec_ == media::kCodecVP8;
 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID)
   profile_supported |=
       (codec_ == media::kCodecVP9 || codec_ == media::kCodecH264);
 #endif
 
   if (!profile_supported) {
     LOG(ERROR) << "Unsupported profile: " << profile;
     return false;
   }
(...skipping 73 matching lines...)
       pending_bitstream_buffers_.front().first;
   pending_bitstream_buffers_.pop();
 
   if (bitstream_buffer.id() == -1) {
     media_codec_->QueueEOS(input_buf_index);
     return;
   }
 
   scoped_ptr<base::SharedMemory> shm(
       new base::SharedMemory(bitstream_buffer.handle(), true));
-  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
+  RETURN_ON_FAILURE(this, shm->Map(bitstream_buffer.size()),
                     "Failed to SharedMemory::Map()", UNREADABLE_INPUT);
 
   const base::TimeDelta presentation_timestamp =
       bitstream_buffer.presentation_timestamp();
   DCHECK(presentation_timestamp != media::kNoTimestamp())
       << "Bitstream buffers must have valid presentation timestamps";
   // There may already be a bitstream buffer with this timestamp, e.g., VP9 alt
   // ref frames, but it's OK to overwrite it because we only expect a single
   // output frame to have that timestamp. AVDA clients only use the bitstream
   // buffer id in the returned Pictures to map a bitstream buffer back to a
   // timestamp on their side, so either one of the bitstream buffer ids will
   // result in them finding the right timestamp.
   bitstream_buffers_in_decoder_[presentation_timestamp] = bitstream_buffer.id();
 
   status = media_codec_->QueueInputBuffer(
       input_buf_index, static_cast<const uint8*>(shm->memory()),
       bitstream_buffer.size(), presentation_timestamp);
-  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
+  RETURN_ON_FAILURE(this, status == media::MEDIA_CODEC_OK,
                     "Failed to QueueInputBuffer: " << status, PLATFORM_FAILURE);
 
   // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output
   // will be returned from the bitstream buffer. However, MediaCodec API is
   // not enough to guarantee it.
   // So, here, we calls NotifyEndOfBitstreamBuffer() in advance in order to
   // keep getting more bitstreams from the client, and throttle them by using
   // |bitstreams_notified_in_advance_|.
   // TODO(dwkang): check if there is a way to remove this workaround.
   base::MessageLoop::current()->PostTask(
(...skipping 39 matching lines...)
         base::MessageLoop::current()->PostTask(
             FROM_HERE,
             base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers,
                        weak_this_factory_.GetWeakPtr()));
       } else {
         // Dynamic resolution change support is not specified by the Android
         // platform at and before JB-MR1, so it's not possible to smoothly
         // continue playback at this point. Instead, error out immediately,
         // expecting clients to Reset() as appropriate to avoid this.
         // b/7093648
-        RETURN_ON_FAILURE(size_ == gfx::Size(width, height),
+        RETURN_ON_FAILURE(this, size_ == gfx::Size(width, height),
                           "Dynamic resolution change is not supported.",
                           PLATFORM_FAILURE);
       }
       return;
     }
 
     case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
       break;
 
     case media::MEDIA_CODEC_OK:
       DCHECK_GE(buf_index, 0);
       break;
 
     default:
       NOTREACHED();
       break;
     }
   } while (buf_index < 0);
 
-  // This ignores the emitted ByteBuffer and instead relies on rendering to the
-  // codec's SurfaceTexture and then copying from that texture to the client's
-  // PictureBuffer's texture. This means that each picture's data is written
-  // three times: once to the ByteBuffer, once to the SurfaceTexture, and once
-  // to the client's texture. It would be nicer to either:
-  // 1) Render directly to the client's texture from MediaCodec (one write); or
-  // 2) Upload the ByteBuffer to the client's texture (two writes).
-  // Unfortunately neither is possible:
-  // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture
-  //    written to can't change during the codec's lifetime. b/11990461
-  // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific,
-  //    opaque/non-standard format. It's not possible to negotiate the decoder
-  //    to emit a specific colorspace, even using HW CSC. b/10706245
-  // So, we live with these two extra copies per picture :(
-  media_codec_->ReleaseOutputBuffer(buf_index, true);
-
   if (eos) {
+    media_codec_->ReleaseOutputBuffer(buf_index, false);
     base::MessageLoop::current()->PostTask(
         FROM_HERE,
         base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone,
                    weak_this_factory_.GetWeakPtr()));
   } else {
     // Get the bitstream buffer id from the timestamp.
     auto it = bitstream_buffers_in_decoder_.find(presentation_timestamp);
     // Require the decoder to output at most one frame for each distinct input
     // buffer timestamp. A VP9 alt ref frame is a case where an input buffer,
     // with a possibly unique timestamp, will not result in a corresponding
     // output frame.
     CHECK(it != bitstream_buffers_in_decoder_.end())
         << "Unexpected output frame timestamp";
     const int32 bitstream_buffer_id = it->second;
     bitstream_buffers_in_decoder_.erase(bitstream_buffers_in_decoder_.begin(),
                                         ++it);
-    SendCurrentSurfaceToClient(bitstream_buffer_id);
+    SendCurrentSurfaceToClient(buf_index, bitstream_buffer_id);
 
     // Removes ids former or equal than the id from decoder. Note that
     // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder
     // because of frame reordering issue. We just maintain this roughly and use
     // for the throttling purpose.
     for (auto bitstream_it = bitstreams_notified_in_advance_.begin();
          bitstream_it != bitstreams_notified_in_advance_.end();
          ++bitstream_it) {
       if (*bitstream_it == bitstream_buffer_id) {
         bitstreams_notified_in_advance_.erase(
             bitstreams_notified_in_advance_.begin(), ++bitstream_it);
         break;
       }
     }
   }
 }
 
 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
+    int32 codec_buffer_index,
     int32 bitstream_id) {
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK_NE(bitstream_id, -1);
   DCHECK(!free_picture_ids_.empty());
 
-  RETURN_ON_FAILURE(make_context_current_.Run(),
+  RETURN_ON_FAILURE(this, make_context_current_.Run(),
                     "Failed to make this decoder's GL context current.",
                     PLATFORM_FAILURE);
 
   int32 picture_buffer_id = free_picture_ids_.front();
   free_picture_ids_.pop();
 
-  float transfrom_matrix[16];
-  surface_texture_->UpdateTexImage();
-  surface_texture_->GetTransformMatrix(transfrom_matrix);
-
   OutputBufferMap::const_iterator i =
       output_picture_buffers_.find(picture_buffer_id);
-  RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
+  RETURN_ON_FAILURE(this, i != output_picture_buffers_.end(),
                     "Can't find a PictureBuffer for " << picture_buffer_id,
                     PLATFORM_FAILURE);
-  uint32 picture_buffer_texture_id = i->second.texture_id();
 
-  RETURN_ON_FAILURE(gl_decoder_.get(),
-                    "Failed to get gles2 decoder instance.",
-                    ILLEGAL_STATE);
-  // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
-  // needed because it takes 10s of milliseconds to initialize.
-  if (!copier_) {
-    copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
-    copier_->Initialize(gl_decoder_.get());
-  }
-
-  // Here, we copy |surface_texture_id_| to the picture buffer instead of
-  // setting new texture to |surface_texture_| by calling attachToGLContext()
-  // because:
-  // 1. Once we call detachFrameGLContext(), it deletes the texture previous
-  //    attached.
-  // 2. SurfaceTexture requires us to apply a transform matrix when we show
-  //    the texture.
-  // TODO(hkuang): get the StreamTexture transform matrix in GPU process
-  // instead of using default matrix crbug.com/226218.
-  const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f,
-                                             0.0f, 1.0f, 0.0f, 0.0f,
-                                             0.0f, 0.0f, 1.0f, 0.0f,
-                                             0.0f, 0.0f, 0.0f, 1.0f};
-  copier_->DoCopyTextureWithTransform(gl_decoder_.get(),
-                                      GL_TEXTURE_EXTERNAL_OES,
-                                      surface_texture_id_,
-                                      picture_buffer_texture_id,
-                                      size_.width(),
-                                      size_.height(),
-                                      false,
-                                      false,
-                                      false,
-                                      default_matrix);
+  // Connect the PictureBuffer to the decoded frame, via whatever
+  // mechanism the strategy likes.
+  strategy_->AssignCurrentSurfaceToPictureBuffer(codec_buffer_index, i->second);
 
   // TODO(henryhsu): Pass (0, 0) as visible size will cause several test
   // cases failed. We should make sure |size_| is coded size or visible size.
   base::MessageLoop::current()->PostTask(
       FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady,
                             weak_this_factory_.GetWeakPtr(),
                             media::Picture(picture_buffer_id, bitstream_id,
                                            gfx::Rect(size_), false)));
 }
 
 void AndroidVideoDecodeAccelerator::Decode(
     const media::BitstreamBuffer& bitstream_buffer) {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
     base::MessageLoop::current()->PostTask(
         FROM_HERE,
         base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
-                   weak_this_factory_.GetWeakPtr(),
-                   bitstream_buffer.id()));
+                   weak_this_factory_.GetWeakPtr(), bitstream_buffer.id()));
     return;
   }
 
   pending_bitstream_buffers_.push(
       std::make_pair(bitstream_buffer, base::Time::Now()));
 
   DoIOTask();
 }
 
+void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
+  client_->ProvidePictureBuffers(strategy_->GetNumPictureBuffers(), size_,
+                                 strategy_->GetTextureTarget());
+}
+
 void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
     const std::vector<media::PictureBuffer>& buffers) {
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK(output_picture_buffers_.empty());
   DCHECK(free_picture_ids_.empty());
 
   for (size_t i = 0; i < buffers.size(); ++i) {
-    RETURN_ON_FAILURE(buffers[i].size() == size_,
+    RETURN_ON_FAILURE(this, buffers[i].size() == size_,
                       "Invalid picture buffer size was passed.",
                       INVALID_ARGUMENT);
     int32 id = buffers[i].id();
     output_picture_buffers_.insert(std::make_pair(id, buffers[i]));
     free_picture_ids_.push(id);
     // Since the client might be re-using |picture_buffer_id| values, forget
     // about previously-dismissed IDs now. See ReusePictureBuffer() comment
     // about "zombies" for why we maintain this set in the first place.
     dismissed_picture_ids_.erase(id);
   }
 
-  RETURN_ON_FAILURE(output_picture_buffers_.size() >= kNumPictureBuffers,
-                    "Invalid picture buffers were passed.",
-                    INVALID_ARGUMENT);
+  RETURN_ON_FAILURE(
+      this, output_picture_buffers_.size() >= strategy_->GetNumPictureBuffers(),
+      "Invalid picture buffers were passed.", INVALID_ARGUMENT);
 
   DoIOTask();
 }
 
 void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
     int32 picture_buffer_id) {
   DCHECK(thread_checker_.CalledOnValidThread());
 
   // This ReusePictureBuffer() might have been in a pipe somewhere (queued in
   // IPC, or in a PostTask either at the sender or receiver) when we sent a
(...skipping 37 matching lines...)
   DCHECK(thread_checker_.CalledOnValidThread());
 
   while (!pending_bitstream_buffers_.empty()) {
     int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id();
     pending_bitstream_buffers_.pop();
 
     if (bitstream_buffer_id != -1) {
       base::MessageLoop::current()->PostTask(
           FROM_HERE,
           base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
-                     weak_this_factory_.GetWeakPtr(),
-                     bitstream_buffer_id));
+                     weak_this_factory_.GetWeakPtr(), bitstream_buffer_id));
     }
   }
   bitstreams_notified_in_advance_.clear();
 
   for (OutputBufferMap::iterator it = output_picture_buffers_.begin();
        it != output_picture_buffers_.end();
        ++it) {
     client_->DismissPictureBuffer(it->first);
     dismissed_picture_ids_.insert(it->first);
   }
(...skipping 15 matching lines...)
 
   base::MessageLoop::current()->PostTask(
       FROM_HERE,
       base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone,
                  weak_this_factory_.GetWeakPtr()));
 }
 
 void AndroidVideoDecodeAccelerator::Destroy() {
   DCHECK(thread_checker_.CalledOnValidThread());
 
+  strategy_->Cleanup();
+
   weak_this_factory_.InvalidateWeakPtrs();
   if (media_codec_) {
     io_timer_.Stop();
     media_codec_->Stop();
   }
   if (surface_texture_id_)
     glDeleteTextures(1, &surface_texture_id_);
-  if (copier_)
-    copier_->Destroy();
   delete this;
 }
 
 bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() {
   return false;
 }
 
-void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
-  client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
-}
+const gfx::Size& AndroidVideoDecodeAccelerator::GetSize() const {
+  return size_;
+}
+
+const base::ThreadChecker& AndroidVideoDecodeAccelerator::ThreadChecker()
+    const {
+  return thread_checker_;
+}
+
+gfx::SurfaceTexture* AndroidVideoDecodeAccelerator::GetSurfaceTexture() const {
+  return surface_texture_.get();
+}
+
+uint32 AndroidVideoDecodeAccelerator::GetSurfaceTextureId() const {
+  return surface_texture_id_;
+}
+
+gpu::gles2::GLES2Decoder* AndroidVideoDecodeAccelerator::GetGlDecoder() const {
+  return gl_decoder_.get();
+}
+
+media::VideoCodecBridge* AndroidVideoDecodeAccelerator::GetMediaCodec() {
+  return media_codec_.get();
+}
+
+void AndroidVideoDecodeAccelerator::PostError(
+    const ::tracked_objects::Location& from_here,
+    media::VideoDecodeAccelerator::Error error) {
+  base::MessageLoop::current()->PostTask(
+      from_here, base::Bind(&AndroidVideoDecodeAccelerator::NotifyError,
+                            weak_this_factory_.GetWeakPtr(), error));
+  state_ = ERROR;
+}
 
 void AndroidVideoDecodeAccelerator::NotifyPictureReady(
     const media::Picture& picture) {
   client_->PictureReady(picture);
 }
 
 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
     int input_buffer_id) {
   client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
(...skipping 45 matching lines...)
     // software fallback for H264 on Android anyway.
     profile.max_resolution.SetSize(3840, 2160);
     profiles.push_back(profile);
   }
 #endif
 
   return profiles;
 }
 
 }  // namespace content
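
Reading the new calls in this file together (strategy_->SetStateProvider(this) in Initialize(), GetNumPictureBuffers() and GetTextureTarget() in the new RequestPictureBuffers() and in AssignPictureBuffers(), AssignCurrentSurfaceToPictureBuffer() in SendCurrentSurfaceToClient(), and Cleanup() in Destroy()), plus the accessors added at the bottom of the file (GetSize(), GetGlDecoder(), GetMediaCodec(), GetSurfaceTexture(), GetSurfaceTextureId(), PostError()), outlines the seam this refactor introduces: AVDA delegates picture-buffer handling to a pluggable BackingStrategy so that a zero-copy strategy can later replace the copy-based one. The interface itself is declared in the header, which is not part of this diff, so the sketch below is an inference from those call sites only; the class nesting, return types, and constness are assumptions rather than the actual declaration.

// Hypothetical sketch of the BackingStrategy interface implied by this file's
// call sites; the real declaration (in android_video_decode_accelerator.h,
// not shown here) may differ in naming, nesting, and signatures.
#include "base/basictypes.h"
#include "media/video/picture.h"

namespace content {

class AndroidVideoDecodeAccelerator;

class BackingStrategy {
 public:
  virtual ~BackingStrategy() {}

  // Called once from Initialize(); the decoder passes |this| so the strategy
  // can reach shared state (GetSize(), GetGlDecoder(), GetMediaCodec(),
  // GetSurfaceTexture(), PostError(), ...). The parameter could equally be a
  // narrower state-provider interface.
  virtual void SetStateProvider(AndroidVideoDecodeAccelerator* provider) = 0;

  // Called from Destroy() before MediaCodec is stopped and the surface
  // texture is deleted.
  virtual void Cleanup() = 0;

  // How many PictureBuffers to request from the client, and which texture
  // target backs them. The code removed by this patch hard-coded
  // kNumPictureBuffers and GL_TEXTURE_2D; a zero-copy strategy could answer
  // differently.
  virtual uint32 GetNumPictureBuffers() const = 0;
  virtual uint32 GetTextureTarget() const = 0;

  // Make the decoded frame held in MediaCodec output buffer
  // |codec_buffer_index| visible through |picture_buffer|, either by copying
  // (as the deleted CopyTextureCHROMIUMResourceManager path did) or, later,
  // by zero-copy binding.
  virtual void AssignCurrentSurfaceToPictureBuffer(
      int32 codec_buffer_index,
      const media::PictureBuffer& picture_buffer) = 0;
};

}  // namespace content

Since the constructor now takes scoped_ptr<BackingStrategy>, whoever creates the AVDA chooses the concrete strategy. The logic removed from SendCurrentSurfaceToClient() and Destroy() (the copier, the transform-matrix handling, the ReleaseOutputBuffer() call) is presumably what a copy-based strategy now encapsulates, leaving room for the zero-copy strategy that the issue title announces.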