Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/common/gpu/media/android_video_decode_accelerator.h" | 5 #include "content/common/gpu/media/android_video_decode_accelerator.h" |
| 6 | 6 |
| 7 #include "base/bind.h" | 7 #include "base/bind.h" |
| 8 #include "base/logging.h" | 8 #include "base/logging.h" |
| 9 #include "base/message_loop/message_loop.h" | 9 #include "base/message_loop/message_loop.h" |
| 10 #include "base/metrics/histogram.h" | 10 #include "base/metrics/histogram.h" |
| 11 #include "content/common/gpu/gpu_channel.h" | 11 #include "content/common/gpu/gpu_channel.h" |
| 12 #include "content/common/gpu/media/avda_return_on_failure.h" | |
| 12 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" | 13 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" |
| 13 #include "media/base/bitstream_buffer.h" | 14 #include "media/base/bitstream_buffer.h" |
| 14 #include "media/base/limits.h" | 15 #include "media/base/limits.h" |
| 15 #include "media/base/timestamp_constants.h" | 16 #include "media/base/timestamp_constants.h" |
| 16 #include "media/base/video_decoder_config.h" | 17 #include "media/base/video_decoder_config.h" |
| 17 #include "media/video/picture.h" | 18 #include "media/video/picture.h" |
| 18 #include "ui/gl/android/scoped_java_surface.h" | 19 #include "ui/gl/android/scoped_java_surface.h" |
| 19 #include "ui/gl/android/surface_texture.h" | 20 #include "ui/gl/android/surface_texture.h" |
| 20 #include "ui/gl/gl_bindings.h" | 21 #include "ui/gl/gl_bindings.h" |
| 21 | 22 |
| 22 namespace content { | 23 namespace content { |
| 23 | 24 |
| 24 // Helper macros for dealing with failure. If |result| evaluates false, emit | |
| 25 // |log| to ERROR, register |error| with the decoder, and return. | |
| 26 #define RETURN_ON_FAILURE(result, log, error) \ | |
| 27 do { \ | |
| 28 if (!(result)) { \ | |
| 29 DLOG(ERROR) << log; \ | |
| 30 base::MessageLoop::current()->PostTask( \ | |
| 31 FROM_HERE, \ | |
| 32 base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, \ | |
| 33 weak_this_factory_.GetWeakPtr(), \ | |
| 34 error)); \ | |
| 35 state_ = ERROR; \ | |
| 36 return; \ | |
| 37 } \ | |
| 38 } while (0) | |
| 39 | |
| 40 // TODO(dwkang): We only need kMaxVideoFrames to pass media stack's prerolling | |
| 41 // phase, but 1 is added due to crbug.com/176036. This should be tuned when we | |
| 42 // have actual use case. | |
| 43 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 }; | |
| 44 | |
| 45 // Max number of bitstreams notified to the client with | 25 // Max number of bitstreams notified to the client with |
| 46 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. | 26 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. |
| 47 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; | 27 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; |
| 48 | 28 |
| 49 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) | 29 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) |
| 50 // MediaCodec is only guaranteed to support baseline, but some devices may | 30 // MediaCodec is only guaranteed to support baseline, but some devices may |
| 51 // support others. Advertise support for all H264 profiles and let the | 31 // support others. Advertise support for all H264 profiles and let the |
| 52 // MediaCodec fail when decoding if it's not actually supported. It's assumed | 32 // MediaCodec fail when decoding if it's not actually supported. It's assumed |
| 53 // that consumers won't have software fallback for H264 on Android anyway. | 33 // that consumers won't have software fallback for H264 on Android anyway. |
| 54 static const media::VideoCodecProfile kSupportedH264Profiles[] = { | 34 static const media::VideoCodecProfile kSupportedH264Profiles[] = { |
| (...skipping 28 matching lines...) | |
| 83 // reasonably device-agnostic way to fill in the "believes" above). | 63 // reasonably device-agnostic way to fill in the "believes" above). |
| 84 return base::TimeDelta::FromMilliseconds(10); | 64 return base::TimeDelta::FromMilliseconds(10); |
| 85 } | 65 } |
| 86 | 66 |
| 87 static inline const base::TimeDelta NoWaitTimeOut() { | 67 static inline const base::TimeDelta NoWaitTimeOut() { |
| 88 return base::TimeDelta::FromMicroseconds(0); | 68 return base::TimeDelta::FromMicroseconds(0); |
| 89 } | 69 } |
| 90 | 70 |
| 91 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( | 71 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( |
| 92 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder, | 72 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder, |
| 93 const base::Callback<bool(void)>& make_context_current) | 73 const base::Callback<bool(void)>& make_context_current, |
| | 74 scoped_ptr<BackingStrategy> strategy) |
| 94 : client_(NULL), | 75 : client_(NULL), |
| 95 make_context_current_(make_context_current), | 76 make_context_current_(make_context_current), |
| 96 codec_(media::kCodecH264), | 77 codec_(media::kCodecH264), |
| 97 state_(NO_ERROR), | 78 state_(NO_ERROR), |
| 98 surface_texture_id_(0), | 79 surface_texture_id_(0), |
| 99 picturebuffers_requested_(false), | 80 picturebuffers_requested_(false), |
| 100 gl_decoder_(decoder), | 81 gl_decoder_(decoder), |
| | 82 strategy_(strategy.Pass()), |
| 101 weak_this_factory_(this) {} | 83 weak_this_factory_(this) {} |
| 102 | 84 |
| 103 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { | 85 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { |
| 104 DCHECK(thread_checker_.CalledOnValidThread()); | 86 DCHECK(thread_checker_.CalledOnValidThread()); |
| 105 } | 87 } |
| 106 | 88 |
| 107 bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, | 89 bool AndroidVideoDecodeAccelerator::Initialize( |
| 108 Client* client) { | 90 media::VideoCodecProfile profile, |
| | 91 Client* client) { |
| | watk 2015/09/09 00:36:00: Unnecessary formatting change? |
| 109 DCHECK(!media_codec_); | 92 DCHECK(!media_codec_); |
| 110 DCHECK(thread_checker_.CalledOnValidThread()); | 93 DCHECK(thread_checker_.CalledOnValidThread()); |
| 111 | 94 |
| 112 client_ = client; | 95 client_ = client; |
| 113 codec_ = VideoCodecProfileToVideoCodec(profile); | 96 codec_ = VideoCodecProfileToVideoCodec(profile); |
| 114 | 97 |
| | 98 strategy_->SetStateProvider(this); |
| | 99 |
| 115 bool profile_supported = codec_ == media::kCodecVP8; | 100 bool profile_supported = codec_ == media::kCodecVP8; |
| 116 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) | 101 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID) |
| 117 profile_supported |= | 102 profile_supported |= |
| 118 (codec_ == media::kCodecVP9 || codec_ == media::kCodecH264); | 103 (codec_ == media::kCodecVP9 || codec_ == media::kCodecH264); |
| 119 #endif | 104 #endif |
| 120 | 105 |
| 121 if (!profile_supported) { | 106 if (!profile_supported) { |
| 122 LOG(ERROR) << "Unsupported profile: " << profile; | 107 LOG(ERROR) << "Unsupported profile: " << profile; |
| 123 return false; | 108 return false; |
| 124 } | 109 } |
| (...skipping 73 matching lines...) | |
| 198 pending_bitstream_buffers_.front().first; | 183 pending_bitstream_buffers_.front().first; |
| 199 pending_bitstream_buffers_.pop(); | 184 pending_bitstream_buffers_.pop(); |
| 200 | 185 |
| 201 if (bitstream_buffer.id() == -1) { | 186 if (bitstream_buffer.id() == -1) { |
| 202 media_codec_->QueueEOS(input_buf_index); | 187 media_codec_->QueueEOS(input_buf_index); |
| 203 return; | 188 return; |
| 204 } | 189 } |
| 205 | 190 |
| 206 scoped_ptr<base::SharedMemory> shm( | 191 scoped_ptr<base::SharedMemory> shm( |
| 207 new base::SharedMemory(bitstream_buffer.handle(), true)); | 192 new base::SharedMemory(bitstream_buffer.handle(), true)); |
| 208 RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()), | 193 RETURN_ON_FAILURE(this, shm->Map(bitstream_buffer.size()), |
| 209 "Failed to SharedMemory::Map()", UNREADABLE_INPUT); | 194 "Failed to SharedMemory::Map()", UNREADABLE_INPUT); |
| 210 | 195 |
| 211 const base::TimeDelta presentation_timestamp = | 196 const base::TimeDelta presentation_timestamp = |
| 212 bitstream_buffer.presentation_timestamp(); | 197 bitstream_buffer.presentation_timestamp(); |
| 213 DCHECK(presentation_timestamp != media::kNoTimestamp()) | 198 DCHECK(presentation_timestamp != media::kNoTimestamp()) |
| 214 << "Bitstream buffers must have valid presentation timestamps"; | 199 << "Bitstream buffers must have valid presentation timestamps"; |
| 215 // There may already be a bitstream buffer with this timestamp, e.g., VP9 alt | 200 // There may already be a bitstream buffer with this timestamp, e.g., VP9 alt |
| 216 // ref frames, but it's OK to overwrite it because we only expect a single | 201 // ref frames, but it's OK to overwrite it because we only expect a single |
| 217 // output frame to have that timestamp. AVDA clients only use the bitstream | 202 // output frame to have that timestamp. AVDA clients only use the bitstream |
| 218 // buffer id in the returned Pictures to map a bitstream buffer back to a | 203 // buffer id in the returned Pictures to map a bitstream buffer back to a |
| 219 // timestamp on their side, so either one of the bitstream buffer ids will | 204 // timestamp on their side, so either one of the bitstream buffer ids will |
| 220 // result in them finding the right timestamp. | 205 // result in them finding the right timestamp. |
| 221 bitstream_buffers_in_decoder_[presentation_timestamp] = bitstream_buffer.id(); | 206 bitstream_buffers_in_decoder_[presentation_timestamp] = bitstream_buffer.id(); |
| 222 | 207 |
| 223 status = media_codec_->QueueInputBuffer( | 208 status = media_codec_->QueueInputBuffer( |
| 224 input_buf_index, static_cast<const uint8*>(shm->memory()), | 209 input_buf_index, static_cast<const uint8*>(shm->memory()), |
| 225 bitstream_buffer.size(), presentation_timestamp); | 210 bitstream_buffer.size(), presentation_timestamp); |
| 226 RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK, | 211 RETURN_ON_FAILURE(this, status == media::MEDIA_CODEC_OK, |
| 227 "Failed to QueueInputBuffer: " << status, PLATFORM_FAILURE); | 212 "Failed to QueueInputBuffer: " << status, PLATFORM_FAILURE); |
| 228 | 213 |
| 229 // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output | 214 // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output |
| 230 // will be returned from the bitstream buffer. However, MediaCodec API is | 215 // will be returned from the bitstream buffer. However, MediaCodec API is |
| 231 // not enough to guarantee it. | 216 // not enough to guarantee it. |
| 232 // So, here, we calls NotifyEndOfBitstreamBuffer() in advance in order to | 217 // So, here, we calls NotifyEndOfBitstreamBuffer() in advance in order to |
| 233 // keep getting more bitstreams from the client, and throttle them by using | 218 // keep getting more bitstreams from the client, and throttle them by using |
| 234 // |bitstreams_notified_in_advance_|. | 219 // |bitstreams_notified_in_advance_|. |
| 235 // TODO(dwkang): check if there is a way to remove this workaround. | 220 // TODO(dwkang): check if there is a way to remove this workaround. |
| 236 base::MessageLoop::current()->PostTask( | 221 base::MessageLoop::current()->PostTask( |
| (...skipping 31 matching lines...) | |
| 268 | 253 |
| 269 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: { | 254 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: { |
| 270 int32 width, height; | 255 int32 width, height; |
| 271 media_codec_->GetOutputFormat(&width, &height); | 256 media_codec_->GetOutputFormat(&width, &height); |
| 272 | 257 |
| 273 if (!picturebuffers_requested_) { | 258 if (!picturebuffers_requested_) { |
| 274 picturebuffers_requested_ = true; | 259 picturebuffers_requested_ = true; |
| 275 size_ = gfx::Size(width, height); | 260 size_ = gfx::Size(width, height); |
| 276 base::MessageLoop::current()->PostTask( | 261 base::MessageLoop::current()->PostTask( |
| 277 FROM_HERE, | 262 FROM_HERE, |
| 278 base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers, | 263 base::Bind(&AndroidVideoDecodeAccelerator:: |
| | watk 2015/09/09 00:36:00: Unnecessary formatting change? |
| | 264 RequestPictureBuffers, |
| 279 weak_this_factory_.GetWeakPtr())); | 265 weak_this_factory_.GetWeakPtr())); |
| 280 } else { | 266 } else { |
| 281 // Dynamic resolution change support is not specified by the Android | 267 // Dynamic resolution change support is not specified by the Android |
| 282 // platform at and before JB-MR1, so it's not possible to smoothly | 268 // platform at and before JB-MR1, so it's not possible to smoothly |
| 283 // continue playback at this point. Instead, error out immediately, | 269 // continue playback at this point. Instead, error out immediately, |
| 284 // expecting clients to Reset() as appropriate to avoid this. | 270 // expecting clients to Reset() as appropriate to avoid this. |
| 285 // b/7093648 | 271 // b/7093648 |
| 286 RETURN_ON_FAILURE(size_ == gfx::Size(width, height), | 272 RETURN_ON_FAILURE(this, |
| | 273 size_ == gfx::Size(width, height), |
| 287 "Dynamic resolution change is not supported.", | 274 "Dynamic resolution change is not supported.", |
| 288 PLATFORM_FAILURE); | 275 PLATFORM_FAILURE); |
| 289 } | 276 } |
| 290 return; | 277 return; |
| 291 } | 278 } |
| 292 | 279 |
| 293 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: | 280 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: |
| 294 break; | 281 break; |
| 295 | 282 |
| 296 case media::MEDIA_CODEC_OK: | 283 case media::MEDIA_CODEC_OK: |
| 297 DCHECK_GE(buf_index, 0); | 284 DCHECK_GE(buf_index, 0); |
| 298 break; | 285 break; |
| 299 | 286 |
| 300 default: | 287 default: |
| 301 NOTREACHED(); | 288 NOTREACHED(); |
| 302 break; | 289 break; |
| 303 } | 290 } |
| 304 } while (buf_index < 0); | 291 } while (buf_index < 0); |
| 305 | 292 |
| 306 // This ignores the emitted ByteBuffer and instead relies on rendering to the | |
| 307 // codec's SurfaceTexture and then copying from that texture to the client's | |
| 308 // PictureBuffer's texture. This means that each picture's data is written | |
| 309 // three times: once to the ByteBuffer, once to the SurfaceTexture, and once | |
| 310 // to the client's texture. It would be nicer to either: | |
| 311 // 1) Render directly to the client's texture from MediaCodec (one write); or | |
| 312 // 2) Upload the ByteBuffer to the client's texture (two writes). | |
| 313 // Unfortunately neither is possible: | |
| 314 // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture | |
| 315 // written to can't change during the codec's lifetime. b/11990461 | |
| 316 // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific, | |
| 317 // opaque/non-standard format. It's not possible to negotiate the decoder | |
| 318 // to emit a specific colorspace, even using HW CSC. b/10706245 | |
| 319 // So, we live with these two extra copies per picture :( | |
| 320 media_codec_->ReleaseOutputBuffer(buf_index, true); | |
| 321 | |
| 322 if (eos) { | 293 if (eos) { |
| | 294 media_codec_->ReleaseOutputBuffer(buf_index, false); |
| 323 base::MessageLoop::current()->PostTask( | 295 base::MessageLoop::current()->PostTask( |
| 324 FROM_HERE, | 296 FROM_HERE, |
| 325 base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone, | 297 base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone, |
| 326 weak_this_factory_.GetWeakPtr())); | 298 weak_this_factory_.GetWeakPtr())); |
| 327 } else { | 299 } else { |
| 328 // Get the bitstream buffer id from the timestamp. | 300 // Get the bitstream buffer id from the timestamp. |
| 329 auto it = bitstream_buffers_in_decoder_.find(presentation_timestamp); | 301 auto it = bitstream_buffers_in_decoder_.find(presentation_timestamp); |
| 330 // Require the decoder to output at most one frame for each distinct input | 302 // Require the decoder to output at most one frame for each distinct input |
| 331 // buffer timestamp. A VP9 alt ref frame is a case where an input buffer, | 303 // buffer timestamp. A VP9 alt ref frame is a case where an input buffer, |
| 332 // with a possibly unique timestamp, will not result in a corresponding | 304 // with a possibly unique timestamp, will not result in a corresponding |
| 333 // output frame. | 305 // output frame. |
| 334 CHECK(it != bitstream_buffers_in_decoder_.end()) | 306 CHECK(it != bitstream_buffers_in_decoder_.end()) |
| 335 << "Unexpected output frame timestamp"; | 307 << "Unexpected output frame timestamp"; |
| 336 const int32 bitstream_buffer_id = it->second; | 308 const int32 bitstream_buffer_id = it->second; |
| 337 bitstream_buffers_in_decoder_.erase(bitstream_buffers_in_decoder_.begin(), | 309 bitstream_buffers_in_decoder_.erase(bitstream_buffers_in_decoder_.begin(), |
| 338 ++it); | 310 ++it); |
| 339 SendCurrentSurfaceToClient(bitstream_buffer_id); | 311 SendCurrentSurfaceToClient(buf_index, bitstream_buffer_id); |
| 340 | 312 |
| 341 // Removes ids former or equal than the id from decoder. Note that | 313 // Removes ids former or equal than the id from decoder. Note that |
| 342 // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder | 314 // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder |
| 343 // because of frame reordering issue. We just maintain this roughly and use | 315 // because of frame reordering issue. We just maintain this roughly and use |
| 344 // for the throttling purpose. | 316 // for the throttling purpose. |
| 345 for (auto bitstream_it = bitstreams_notified_in_advance_.begin(); | 317 for (auto bitstream_it = bitstreams_notified_in_advance_.begin(); |
| 346 bitstream_it != bitstreams_notified_in_advance_.end(); | 318 bitstream_it != bitstreams_notified_in_advance_.end(); |
| 347 ++bitstream_it) { | 319 ++bitstream_it) { |
| 348 if (*bitstream_it == bitstream_buffer_id) { | 320 if (*bitstream_it == bitstream_buffer_id) { |
| 349 bitstreams_notified_in_advance_.erase( | 321 bitstreams_notified_in_advance_.erase( |
| 350 bitstreams_notified_in_advance_.begin(), ++bitstream_it); | 322 bitstreams_notified_in_advance_.begin(), ++bitstream_it); |
| 351 break; | 323 break; |
| 352 } | 324 } |
| 353 } | 325 } |
| 354 } | 326 } |
| 355 } | 327 } |
| 356 | 328 |
| 357 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient( | 329 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient( |
| | 330 int32 codec_buffer_index, |
| 358 int32 bitstream_id) { | 331 int32 bitstream_id) { |
| 359 DCHECK(thread_checker_.CalledOnValidThread()); | 332 DCHECK(thread_checker_.CalledOnValidThread()); |
| 360 DCHECK_NE(bitstream_id, -1); | 333 DCHECK_NE(bitstream_id, -1); |
| 361 DCHECK(!free_picture_ids_.empty()); | 334 DCHECK(!free_picture_ids_.empty()); |
| 362 | 335 |
| 363 RETURN_ON_FAILURE(make_context_current_.Run(), | 336 RETURN_ON_FAILURE(this, |
| | 337 make_context_current_.Run(), |
| 364 "Failed to make this decoder's GL context current.", | 338 "Failed to make this decoder's GL context current.", |
| 365 PLATFORM_FAILURE); | 339 PLATFORM_FAILURE); |
| 366 | 340 |
| 367 int32 picture_buffer_id = free_picture_ids_.front(); | 341 int32 picture_buffer_id = free_picture_ids_.front(); |
| 368 free_picture_ids_.pop(); | 342 free_picture_ids_.pop(); |
| 369 | 343 |
| 370 float transfrom_matrix[16]; | |
| 371 surface_texture_->UpdateTexImage(); | |
| 372 surface_texture_->GetTransformMatrix(transfrom_matrix); | |
| 373 | |
| 374 OutputBufferMap::const_iterator i = | 344 OutputBufferMap::const_iterator i = |
| 375 output_picture_buffers_.find(picture_buffer_id); | 345 output_picture_buffers_.find(picture_buffer_id); |
| 376 RETURN_ON_FAILURE(i != output_picture_buffers_.end(), | 346 RETURN_ON_FAILURE(this, |
| | 347 i != output_picture_buffers_.end(), |
| 377 "Can't find a PictureBuffer for " << picture_buffer_id, | 348 "Can't find a PictureBuffer for " << picture_buffer_id, |
| 378 PLATFORM_FAILURE); | 349 PLATFORM_FAILURE); |
| 379 uint32 picture_buffer_texture_id = i->second.texture_id(); | |
| 380 | 350 |
| 381 RETURN_ON_FAILURE(gl_decoder_.get(), | 351 // Connect the PictureBuffer to the decoded frame, via whatever |
| 382 "Failed to get gles2 decoder instance.", | 352 // mechanism the strategy likes. |
| 383 ILLEGAL_STATE); | 353 strategy_->AssignCurrentSurfaceToPictureBuffer(codec_buffer_index, |
| 384 // Defer initializing the CopyTextureCHROMIUMResourceManager until it is | 354 i->second); |
| | watk 2015/09/09 00:36:00: Formatting seems weird |
| 385 // needed because it takes 10s of milliseconds to initialize. | |
| 386 if (!copier_) { | |
| 387 copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager()); | |
| 388 copier_->Initialize(gl_decoder_.get()); | |
| 389 } | |
| 390 | |
| 391 // Here, we copy |surface_texture_id_| to the picture buffer instead of | |
| 392 // setting new texture to |surface_texture_| by calling attachToGLContext() | |
| 393 // because: | |
| 394 // 1. Once we call detachFrameGLContext(), it deletes the texture previous | |
| 395 // attached. | |
| 396 // 2. SurfaceTexture requires us to apply a transform matrix when we show | |
| 397 // the texture. | |
| 398 // TODO(hkuang): get the StreamTexture transform matrix in GPU process | |
| 399 // instead of using default matrix crbug.com/226218. | |
| 400 const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f, | |
| 401 0.0f, 1.0f, 0.0f, 0.0f, | |
| 402 0.0f, 0.0f, 1.0f, 0.0f, | |
| 403 0.0f, 0.0f, 0.0f, 1.0f}; | |
| 404 copier_->DoCopyTextureWithTransform(gl_decoder_.get(), | |
| 405 GL_TEXTURE_EXTERNAL_OES, | |
| 406 surface_texture_id_, | |
| 407 picture_buffer_texture_id, | |
| 408 size_.width(), | |
| 409 size_.height(), | |
| 410 false, | |
| 411 false, | |
| 412 false, | |
| 413 default_matrix); | |
| 414 | 355 |
| 415 // TODO(henryhsu): Pass (0, 0) as visible size will cause several test | 356 // TODO(henryhsu): Pass (0, 0) as visible size will cause several test |
| 416 // cases failed. We should make sure |size_| is coded size or visible size. | 357 // cases failed. We should make sure |size_| is coded size or visible size. |
| 417 base::MessageLoop::current()->PostTask( | 358 base::MessageLoop::current()->PostTask( |
| 418 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady, | 359 FROM_HERE, |
| 419 weak_this_factory_.GetWeakPtr(), | 360 base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady, |
| 420 media::Picture(picture_buffer_id, bitstream_id, | 361 weak_this_factory_.GetWeakPtr(), |
| 421 gfx::Rect(size_), false))); | 362 media::Picture(picture_buffer_id, bitstream_id, |
| | 363 gfx::Rect(size_), false))); |
| | watk 2015/09/09 00:36:00: Unnecessary formatting change? |
| 422 } | 364 } |
| 423 | 365 |
| 424 void AndroidVideoDecodeAccelerator::Decode( | 366 void AndroidVideoDecodeAccelerator::Decode( |
| 425 const media::BitstreamBuffer& bitstream_buffer) { | 367 const media::BitstreamBuffer& bitstream_buffer) { |
| 426 DCHECK(thread_checker_.CalledOnValidThread()); | 368 DCHECK(thread_checker_.CalledOnValidThread()); |
| 427 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) { | 369 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) { |
| 428 base::MessageLoop::current()->PostTask( | 370 base::MessageLoop::current()->PostTask( |
| 429 FROM_HERE, | 371 FROM_HERE, |
| 430 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | 372 base::Bind(&AndroidVideoDecodeAccelerator:: |
| | 373 NotifyEndOfBitstreamBuffer, |
| 431 weak_this_factory_.GetWeakPtr(), | 374 weak_this_factory_.GetWeakPtr(), |
| 432 bitstream_buffer.id())); | 375 bitstream_buffer.id())); |
| 433 return; | 376 return; |
| 434 } | 377 } |
| 435 | 378 |
| 436 pending_bitstream_buffers_.push( | 379 pending_bitstream_buffers_.push( |
| 437 std::make_pair(bitstream_buffer, base::Time::Now())); | 380 std::make_pair(bitstream_buffer, base::Time::Now())); |
| 438 | 381 |
| 439 DoIOTask(); | 382 DoIOTask(); |
| 440 } | 383 } |
| 441 | 384 |
| | 385 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { |
| | 386 client_->ProvidePictureBuffers(strategy_->GetNumPictureBuffers(), |
| | 387 size_, strategy_->GetTextureTarget()); |
| | watk 2015/09/09 00:36:00: Formatting seems weird, indented too far |
| | 388 } |
| | 389 |
| 442 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( | 390 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( |
| 443 const std::vector<media::PictureBuffer>& buffers) { | 391 const std::vector<media::PictureBuffer>& buffers) { |
| 444 DCHECK(thread_checker_.CalledOnValidThread()); | 392 DCHECK(thread_checker_.CalledOnValidThread()); |
| 445 DCHECK(output_picture_buffers_.empty()); | 393 DCHECK(output_picture_buffers_.empty()); |
| 446 DCHECK(free_picture_ids_.empty()); | 394 DCHECK(free_picture_ids_.empty()); |
| 447 | 395 |
| 448 for (size_t i = 0; i < buffers.size(); ++i) { | 396 for (size_t i = 0; i < buffers.size(); ++i) { |
| 449 RETURN_ON_FAILURE(buffers[i].size() == size_, | 397 RETURN_ON_FAILURE(this, |
| | 398 buffers[i].size() == size_, |
| 450 "Invalid picture buffer size was passed.", | 399 "Invalid picture buffer size was passed.", |
| 451 INVALID_ARGUMENT); | 400 INVALID_ARGUMENT); |
| 452 int32 id = buffers[i].id(); | 401 int32 id = buffers[i].id(); |
| 453 output_picture_buffers_.insert(std::make_pair(id, buffers[i])); | 402 output_picture_buffers_.insert(std::make_pair(id, buffers[i])); |
| 454 free_picture_ids_.push(id); | 403 free_picture_ids_.push(id); |
| 455 // Since the client might be re-using |picture_buffer_id| values, forget | 404 // Since the client might be re-using |picture_buffer_id| values, forget |
| 456 // about previously-dismissed IDs now. See ReusePictureBuffer() comment | 405 // about previously-dismissed IDs now. See ReusePictureBuffer() comment |
| 457 // about "zombies" for why we maintain this set in the first place. | 406 // about "zombies" for why we maintain this set in the first place. |
| 458 dismissed_picture_ids_.erase(id); | 407 dismissed_picture_ids_.erase(id); |
| 459 } | 408 } |
| 460 | 409 |
| 461 RETURN_ON_FAILURE(output_picture_buffers_.size() >= kNumPictureBuffers, | 410 RETURN_ON_FAILURE(this, |
| | 411 output_picture_buffers_.size() >= |
| | 412 strategy_->GetNumPictureBuffers(), |
| 462 "Invalid picture buffers were passed.", | 413 "Invalid picture buffers were passed.", |
| 463 INVALID_ARGUMENT); | 414 INVALID_ARGUMENT); |
| 464 | 415 |
| 465 DoIOTask(); | 416 DoIOTask(); |
| 466 } | 417 } |
| 467 | 418 |
| 468 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( | 419 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( |
| 469 int32 picture_buffer_id) { | 420 int32 picture_buffer_id) { |
| 470 DCHECK(thread_checker_.CalledOnValidThread()); | 421 DCHECK(thread_checker_.CalledOnValidThread()); |
| 471 | 422 |
| (...skipping 38 matching lines...) | |
| 510 void AndroidVideoDecodeAccelerator::Reset() { | 461 void AndroidVideoDecodeAccelerator::Reset() { |
| 511 DCHECK(thread_checker_.CalledOnValidThread()); | 462 DCHECK(thread_checker_.CalledOnValidThread()); |
| 512 | 463 |
| 513 while (!pending_bitstream_buffers_.empty()) { | 464 while (!pending_bitstream_buffers_.empty()) { |
| 514 int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id(); | 465 int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id(); |
| 515 pending_bitstream_buffers_.pop(); | 466 pending_bitstream_buffers_.pop(); |
| 516 | 467 |
| 517 if (bitstream_buffer_id != -1) { | 468 if (bitstream_buffer_id != -1) { |
| 518 base::MessageLoop::current()->PostTask( | 469 base::MessageLoop::current()->PostTask( |
| 519 FROM_HERE, | 470 FROM_HERE, |
| 520 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | 471 base::Bind(&AndroidVideoDecodeAccelerator:: |
| | 472 NotifyEndOfBitstreamBuffer, |
| 521 weak_this_factory_.GetWeakPtr(), | 473 weak_this_factory_.GetWeakPtr(), |
| 522 bitstream_buffer_id)); | 474 bitstream_buffer_id)); |
| 523 } | 475 } |
| 524 } | 476 } |
| 525 bitstreams_notified_in_advance_.clear(); | 477 bitstreams_notified_in_advance_.clear(); |
| 526 | 478 |
| 527 for (OutputBufferMap::iterator it = output_picture_buffers_.begin(); | 479 for (OutputBufferMap::iterator it = output_picture_buffers_.begin(); |
| 528 it != output_picture_buffers_.end(); | 480 it != output_picture_buffers_.end(); |
| 529 ++it) { | 481 ++it) { |
| 530 client_->DismissPictureBuffer(it->first); | 482 client_->DismissPictureBuffer(it->first); |
| (...skipping 17 matching lines...) | |
| 548 | 500 |
| 549 base::MessageLoop::current()->PostTask( | 501 base::MessageLoop::current()->PostTask( |
| 550 FROM_HERE, | 502 FROM_HERE, |
| 551 base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone, | 503 base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone, |
| 552 weak_this_factory_.GetWeakPtr())); | 504 weak_this_factory_.GetWeakPtr())); |
| 553 } | 505 } |
| 554 | 506 |
| 555 void AndroidVideoDecodeAccelerator::Destroy() { | 507 void AndroidVideoDecodeAccelerator::Destroy() { |
| 556 DCHECK(thread_checker_.CalledOnValidThread()); | 508 DCHECK(thread_checker_.CalledOnValidThread()); |
| 557 | 509 |
| | 510 strategy_->Cleanup(); |
| | 511 |
| 558 weak_this_factory_.InvalidateWeakPtrs(); | 512 weak_this_factory_.InvalidateWeakPtrs(); |
| 559 if (media_codec_) { | 513 if (media_codec_) { |
| 560 io_timer_.Stop(); | 514 io_timer_.Stop(); |
| 561 media_codec_->Stop(); | 515 media_codec_->Stop(); |
| 562 } | 516 } |
| 563 if (surface_texture_id_) | 517 if (surface_texture_id_) |
| 564 glDeleteTextures(1, &surface_texture_id_); | 518 glDeleteTextures(1, &surface_texture_id_); |
| 565 if (copier_) | |
| 566 copier_->Destroy(); | |
| 567 delete this; | 519 delete this; |
| 568 } | 520 } |
| 569 | 521 |
| 570 bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() { | 522 bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() { |
| 571 return false; | 523 return false; |
| 572 } | 524 } |
| 573 | 525 |
| 574 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { | 526 const gfx::Size& AndroidVideoDecodeAccelerator::GetSize() const { |
| 575 client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D); | 527 return size_; |
| | 528 } |
| | 529 |
| | 530 const base::ThreadChecker& |
| | 531 AndroidVideoDecodeAccelerator::ThreadChecker() const { |
| | 532 return thread_checker_; |
| | 533 } |
| | 534 |
| | 535 gfx::SurfaceTexture* |
| | 536 AndroidVideoDecodeAccelerator::GetSurfaceTexture() const { |
| | 537 return surface_texture_.get(); |
| | 538 } |
| | 539 |
| | 540 uint32 AndroidVideoDecodeAccelerator::GetSurfaceTextureId() const { |
| | 541 return surface_texture_id_; |
| | 542 } |
| | 543 |
| | 544 gpu::gles2::GLES2Decoder* |
| | 545 AndroidVideoDecodeAccelerator::GetGlDecoder() const { |
| | 546 return gl_decoder_.get(); |
| | 547 } |
| | 548 |
| | 549 media::VideoCodecBridge* AndroidVideoDecodeAccelerator::GetMediaCodec() { |
| | 550 return media_codec_.get(); |
| | 551 } |
| | 552 |
| | 553 void AndroidVideoDecodeAccelerator::PostError( |
| | 554 const ::tracked_objects::Location& from_here, |
| | 555 media::VideoDecodeAccelerator::Error error) { |
| | 556 base::MessageLoop::current()->PostTask(from_here, |
| | 557 base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, |
| | 558 weak_this_factory_.GetWeakPtr(), |
| | 559 error)); |
| | 560 state_ = ERROR; |
| 576 } | 561 } |
| 577 | 562 |
| 578 void AndroidVideoDecodeAccelerator::NotifyPictureReady( | 563 void AndroidVideoDecodeAccelerator::NotifyPictureReady( |
| 579 const media::Picture& picture) { | 564 const media::Picture& picture) { |
| 580 client_->PictureReady(picture); | 565 client_->PictureReady(picture); |
| 581 } | 566 } |
| 582 | 567 |
| 583 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( | 568 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( |
| 584 int input_buffer_id) { | 569 int input_buffer_id) { |
| 585 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | 570 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); |
| (...skipping 45 matching lines...) | |
| 631 // software fallback for H264 on Android anyway. | 616 // software fallback for H264 on Android anyway. |
| 632 profile.max_resolution.SetSize(3840, 2160); | 617 profile.max_resolution.SetSize(3840, 2160); |
| 633 profiles.push_back(profile); | 618 profiles.push_back(profile); |
| 634 } | 619 } |
| 635 #endif | 620 #endif |
| 636 | 621 |
| 637 return profiles; | 622 return profiles; |
| 638 } | 623 } |
| 639 | 624 |
| 640 } // namespace content | 625 } // namespace content |
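The diff moves the decoder's output handling behind a new `BackingStrategy` member: the constructor now takes a `scoped_ptr<BackingStrategy>` (stored with `strategy.Pass()`), `Initialize()` calls `strategy_->SetStateProvider(this)`, picture-buffer counts and texture targets come from the strategy, and `SendCurrentSurfaceToClient()` delegates the texture hookup to `AssignCurrentSurfaceToPictureBuffer()`. The interface itself is not part of this file, so the following is only a rough sketch inferred from those call sites and from the state-provider accessors (`GetSize()`, `GetSurfaceTexture()`, `GetGlDecoder()`, `GetMediaCodec()`, `PostError()`) added at the bottom of the new file; everything beyond those names is an assumption for illustration.

```cpp
// Sketch only -- inferred from the call sites in this diff, not from the
// real android_video_decode_accelerator.h. Treat names and signatures that
// do not appear in the diff as hypothetical.
class BackingStrategy {
 public:
  virtual ~BackingStrategy() {}

  // Initialize() hands the strategy a pointer back to the AVDA so it can
  // query size, surface texture, GL decoder and media codec, and report
  // failures via PostError().
  virtual void SetStateProvider(AndroidVideoDecodeAccelerator* provider) = 0;

  // Replace the old kNumPictureBuffers constant and the hard-coded
  // GL_TEXTURE_2D target in RequestPictureBuffers().
  virtual uint32 GetNumPictureBuffers() const = 0;
  virtual uint32 GetTextureTarget() const = 0;

  // Replaces the inline CopyTextureCHROMIUM path: connect the decoded
  // MediaCodec output buffer to the client's PictureBuffer texture.
  virtual void AssignCurrentSurfaceToPictureBuffer(
      int32 codec_buffer_index,
      const media::PictureBuffer& picture_buffer) = 0;

  // Called from Destroy() before the codec and surface texture are torn down.
  virtual void Cleanup() = 0;
};
```

Callers would then construct the accelerator as `new AndroidVideoDecodeAccelerator(decoder, make_context_current, strategy.Pass())`, keeping the copy-to-texture versus render-to-SurfaceTexture decision out of the core decoder loop.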
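Similarly, the local RETURN_ON_FAILURE macro deleted above (old lines 26-38) is replaced by the new `avda_return_on_failure.h` include, and every call site gains a leading `this` argument. That header is not shown in this review; a minimal sketch of what the shared macro might look like, assuming it simply routes failures through the `PostError()` helper added in the new code, is:

```cpp
// Hypothetical sketch of avda_return_on_failure.h -- not part of this diff.
// Assumes |provider| exposes PostError(location, error) as added above.
#ifndef CONTENT_COMMON_GPU_MEDIA_AVDA_RETURN_ON_FAILURE_H_
#define CONTENT_COMMON_GPU_MEDIA_AVDA_RETURN_ON_FAILURE_H_

// If |result| is false, log |log|, report |error| to the client through the
// provider, and return from the calling function.
#define RETURN_ON_FAILURE(provider, result, log, error) \
  do {                                                  \
    if (!(result)) {                                    \
      DLOG(ERROR) << log;                               \
      (provider)->PostError(FROM_HERE, error);          \
      return;                                           \
    }                                                   \
  } while (0)

#endif  // CONTENT_COMMON_GPU_MEDIA_AVDA_RETURN_ON_FAILURE_H_
```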