OLD | NEW |
(Empty) | |
| 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "content/renderer/media/rtc_video_decoder.h" |
| 6 |
| 7 #include "base/bind.h" |
| 8 #include "base/logging.h" |
| 9 #include "base/memory/ref_counted.h" |
| 10 #include "base/message_loop/message_loop_proxy.h" |
| 11 #include "base/safe_numerics.h" |
| 12 #include "base/stl_util.h" |
| 13 #include "base/task_runner_util.h" |
| 14 #include "content/child/child_thread.h" |
| 15 #include "media/base/bind_to_loop.h" |
| 16 #include "third_party/webrtc/system_wrappers/interface/ref_count.h" |
| 17 |
| 18 namespace content { |
| 19 |
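| // Bitstream buffer ids live in [0, ID_LAST] and wrap around; ID_HALF is the
| // window IsBufferAfterReset() uses to order ids across the wrap. ID_INVALID
| // means no Reset or Release has been requested yet.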
| 20 const int32 RTCVideoDecoder::ID_LAST = 0x3FFFFFFF; |
| 21 const int32 RTCVideoDecoder::ID_HALF = 0x20000000; |
| 22 const int32 RTCVideoDecoder::ID_INVALID = -1; |
| 23 |
| 24 // Maximum number of concurrent VDA::Decode() operations RVD will maintain. |
| 25 // Higher values allow better pipelining in the GPU, but also require more |
| 26 // resources. |
| 27 static const size_t kMaxInFlightDecodes = 8; |
| 28 |
| 29 // Size of shared-memory segments we allocate. Since we reuse them we let them |
| 30 // be on the beefy side. |
| 31 static const size_t kSharedMemorySegmentBytes = 100 << 10; |
| 32 |
| 33 // Maximum number of allocated shared-memory segments. |
| 34 static const int kMaxNumSharedMemorySegments = 16; |
| 35 |
| 36 // Maximum number of pending WebRTC buffers that are waiting for shared
| 37 // memory: 10 seconds' worth at 30 fps.
| 38 static const size_t kMaxNumOfPendingBuffers = 300; |
| 39 |
| 40 // A shared memory segment and its allocated size. This class has the ownership |
| 41 // of |shm|. |
| 42 class RTCVideoDecoder::SHMBuffer { |
| 43 public: |
| 44 SHMBuffer(base::SharedMemory* shm, size_t size); |
| 45 ~SHMBuffer(); |
| 46 base::SharedMemory* const shm; |
| 47 const size_t size; |
| 48 }; |
| 49 |
| 50 RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size) |
| 51 : shm(shm), size(size) {} |
| 52 |
| 53 RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); } |
| 54 |
| 55 RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id, |
| 56 uint32_t timestamp, |
| 57 int width, |
| 58 int height, |
| 59 size_t size) |
| 60 : bitstream_buffer_id(bitstream_buffer_id), |
| 61 timestamp(timestamp), |
| 62 width(width), |
| 63 height(height), |
| 64 size(size) {} |
| 65 |
| 66 RTCVideoDecoder::BufferData::BufferData() {} |
| 67 |
| 68 RTCVideoDecoder::BufferData::~BufferData() {} |
| 69 |
| 70 RTCVideoDecoder::RTCVideoDecoder( |
| 71 const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) |
| 72 : weak_factory_(this), |
| 73 weak_this_(weak_factory_.GetWeakPtr()), |
| 74 factories_(factories), |
| 75 vda_loop_proxy_(factories_->GetMessageLoop()), |
| 76 create_shm_thread_("CreateSHMThread"), |
| 77 decoder_texture_target_(0), |
| 78 next_picture_buffer_id_(0), |
| 79 state_(UNINITIALIZED), |
| 80 decode_complete_callback_(NULL), |
| 81 num_shm_buffers_(0), |
| 82 next_bitstream_buffer_id_(0), |
| 83 reset_bitstream_buffer_id_(ID_INVALID) { |
| 84 create_shm_thread_.Start(); |
| 85 // Initialize directly if |vda_loop_proxy_| is the renderer thread. |
| 86 base::WaitableEvent compositor_loop_async_waiter(false, false); |
| 87 if (vda_loop_proxy_->BelongsToCurrentThread()) { |
| 88 Initialize(&compositor_loop_async_waiter); |
| 89 return; |
| 90 } |
| 91 // Post the task if |vda_loop_proxy_| is the compositor thread. Waiting here |
| 92 // is safe because the compositor thread will not be stopped until the |
| 93 // renderer thread shuts down. |
| 94 vda_loop_proxy_->PostTask(FROM_HERE, |
| 95 base::Bind(&RTCVideoDecoder::Initialize, |
| 96 base::Unretained(this), |
| 97 &compositor_loop_async_waiter)); |
| 98 compositor_loop_async_waiter.Wait(); |
| 99 } |
| 100 |
| 101 RTCVideoDecoder::~RTCVideoDecoder() { |
| 102 DVLOG(2) << "~RTCVideoDecoder"; |
| 103 factories_->Abort(); |
| 104 create_shm_thread_.Stop(); |
| 105 // Destroy the VDA and remove the destruction observer if its thread is alive.
| 106 if (vda_loop_proxy_->BelongsToCurrentThread()) { |
| 107 base::MessageLoop::current()->RemoveDestructionObserver(this); |
| 108 DestroyVDA(); |
| 109 } else { |
| 110 // VDA should have been destroyed in WillDestroyCurrentMessageLoop. |
| 111 DCHECK(!vda_); |
| 112 } |
| 113 |
| 114 // Delete all shared memory segments.
| 115 STLDeleteElements(&available_shm_segments_); |
| 116 STLDeleteValues(&bitstream_buffers_in_decoder_); |
| 117 STLDeleteContainerPairFirstPointers(decode_buffers_.begin(), |
| 118 decode_buffers_.end()); |
| 119 decode_buffers_.clear(); |
| 120 |
| 121 // Delete WebRTC input buffers. |
| 122 for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it = |
| 123 pending_buffers_.begin(); |
| 124 it != pending_buffers_.end(); |
| 125 ++it) { |
| 126 delete[] it->first._buffer; |
| 127 } |
| 128 } |
| 129 |
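| // Creates an RTCVideoDecoder and its VDA. Returns NULL (and schedules the
| // half-constructed decoder for deletion on the VDA loop) if no VP8-capable
| // VDA is available.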
| 130 scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create( |
| 131 const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) { |
| 132 scoped_ptr<RTCVideoDecoder> decoder(new RTCVideoDecoder(factories)); |
| 133 decoder->vda_.reset(factories->CreateVideoDecodeAccelerator( |
| 134 media::VP8PROFILE_MAIN, decoder.get())); |
| 135 // vda can be NULL if VP8 is not supported. |
| 136 if (decoder->vda_ != NULL) { |
| 137 decoder->state_ = INITIALIZED; |
| 138 } else { |
| 139 factories->GetMessageLoop()->DeleteSoon(FROM_HERE, decoder.release()); |
| 140 } |
| 141 return decoder.Pass(); |
| 142 } |
| 143 |
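| // webrtc::VideoDecoder implementation. Checks the VP8 codec settings and
| // pre-allocates shared-memory segments for encoded input.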
| 144 int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings, |
| 145 int32_t /*numberOfCores*/) { |
| 146 DVLOG(2) << "InitDecode"; |
| 147 DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8); |
| 148 if (codecSettings->codecSpecific.VP8.feedbackModeOn) { |
| 149 LOG(ERROR) << "Feedback mode not supported"; |
| 150 return WEBRTC_VIDEO_CODEC_ERROR; |
| 151 } |
| 152 |
| 153 base::AutoLock auto_lock(lock_); |
| 154 if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) { |
| 155 LOG(ERROR) << "VDA is not initialized. state=" << state_; |
| 156 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 157 } |
| 158 // Create some shared memory if the queue is empty. |
| 159 if (available_shm_segments_.size() == 0) { |
| 160 // Unretained is safe because the destructor will wait until |
| 161 // |create_shm_thread_| stops. |
| 162 create_shm_thread_.message_loop_proxy() |
| 163 ->PostTask(FROM_HERE, |
| 164 base::Bind(&RTCVideoDecoder::CreateSHM, |
| 165 base::Unretained(this), |
| 166 kMaxInFlightDecodes, |
| 167 kSharedMemorySegmentBytes)); |
| 168 } |
| 169 return WEBRTC_VIDEO_CODEC_OK; |
| 170 } |
| 171 |
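| // Copies the encoded frame into shared memory and posts a decode request to
| // |vda_loop_proxy_|. If no shared-memory segment is free, or other frames are
| // already pending, the frame is queued in |pending_buffers_| instead.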
| 172 int32_t RTCVideoDecoder::Decode( |
| 173 const webrtc::EncodedImage& inputImage, |
| 174 bool missingFrames, |
| 175 const webrtc::RTPFragmentationHeader* /*fragmentation*/, |
| 176 const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/, |
| 177 int64_t /*renderTimeMs*/) { |
| 178 DVLOG(3) << "Decode"; |
| 179 |
| 180 base::AutoLock auto_lock(lock_); |
| 181 if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) { |
| 182 LOG(ERROR) << "The decoder has not been initialized.";
| 183 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 184 } |
| 185 if (state_ == DECODE_ERROR) { |
| 186 LOG(ERROR) << "Decoding error occurred."; |
| 187 return WEBRTC_VIDEO_CODEC_ERROR; |
| 188 } |
| 189 if (missingFrames || !inputImage._completeFrame) { |
| 190 DLOG(ERROR) << "Missing or incomplete frames."; |
| 191 // Unlike the SW decoder in libvpx, the HW decoder cannot handle broken frames.
| 192 // Return an error to request a key frame. |
| 193 return WEBRTC_VIDEO_CODEC_ERROR; |
| 194 } |
| 195 if (inputImage._frameType == webrtc::kKeyFrame) |
| 196 frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight); |
| 197 |
| 198 // Create buffer metadata. |
| 199 BufferData buffer_data(next_bitstream_buffer_id_, |
| 200 inputImage._timeStamp, |
| 201 frame_size_.width(), |
| 202 frame_size_.height(), |
| 203 inputImage._length); |
| 204 // Mask against 30 bits, to avoid (undefined) wraparound on signed integer. |
| 205 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST; |
| 206 |
| 207 // If the shared memory is available and there are no pending buffers, send |
| 208 // the buffer for decode. If not, save the buffer in the queue for decode |
| 209 // later. |
| 210 scoped_ptr<SHMBuffer> shm_buffer; |
| 211 if (pending_buffers_.size() == 0) |
| 212 shm_buffer = GetSHM_Locked(inputImage._length); |
| 213 if (!shm_buffer) { |
| 214 int32_t result = SaveToPendingBuffers_Locked(inputImage, buffer_data); |
| 215 return result ? WEBRTC_VIDEO_CODEC_OK : WEBRTC_VIDEO_CODEC_ERROR; |
| 216 } |
| 217 |
| 218 SaveToDecodeBuffers_Locked(inputImage, shm_buffer.Pass(), buffer_data); |
| 219 vda_loop_proxy_->PostTask( |
| 220 FROM_HERE, base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_)); |
| 221 return WEBRTC_VIDEO_CODEC_OK; |
| 222 } |
| 223 |
| 224 int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback( |
| 225 webrtc::DecodedImageCallback* callback) { |
| 226 DVLOG(2) << "RegisterDecodeCompleteCallback"; |
| 227 base::AutoLock auto_lock(lock_); |
| 228 decode_complete_callback_ = callback; |
| 229 return WEBRTC_VIDEO_CODEC_OK; |
| 230 } |
| 231 |
| 232 int32_t RTCVideoDecoder::Release() { |
| 233 DVLOG(2) << "Release"; |
| 234 // Do not destroy VDA because the decoder will be recycled by |
| 235 // RTCVideoDecoderFactory. Just reset VDA. |
| 236 return Reset(); |
| 237 } |
| 238 |
| 239 int32_t RTCVideoDecoder::Reset() { |
| 240 DVLOG(2) << "Reset"; |
| 241 base::AutoLock auto_lock(lock_); |
| 242 if (state_ == UNINITIALIZED) { |
| 243 LOG(ERROR) << "Decoder not initialized."; |
| 244 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 245 } |
| 246 if (next_bitstream_buffer_id_ != 0) |
| 247 reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1; |
| 248 else |
| 249 reset_bitstream_buffer_id_ = ID_LAST; |
| 250 // If VDA is already resetting, no need to request the reset again. |
| 251 if (state_ != RESETTING) { |
| 252 state_ = RESETTING; |
| 253 vda_loop_proxy_->PostTask( |
| 254 FROM_HERE, base::Bind(&RTCVideoDecoder::ResetInternal, weak_this_)); |
| 255 } |
| 256 return WEBRTC_VIDEO_CODEC_OK; |
| 257 } |
| 258 |
| 259 void RTCVideoDecoder::NotifyInitializeDone() { |
| 260 DVLOG(2) << "NotifyInitializeDone"; |
| 261 NOTREACHED(); |
| 262 } |
| 263 |
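| // VideoDecodeAccelerator::Client implementation. Creates |count| textures of
| // |size|, tracks them in |assigned_picture_buffers_|, and hands them to the
| // VDA as picture buffers.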
| 264 void RTCVideoDecoder::ProvidePictureBuffers(uint32 count, |
| 265 const gfx::Size& size, |
| 266 uint32 texture_target) { |
| 267 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 268 DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target; |
| 269 |
| 270 if (!vda_) |
| 271 return; |
| 272 |
| 273 std::vector<uint32> texture_ids; |
| 274 std::vector<gpu::Mailbox> texture_mailboxes; |
| 275 decoder_texture_target_ = texture_target; |
| 276 // Discard the sync point returned here, since PictureReady implies that the
| 277 // produce has already happened and the texture is ready for use.
| 278 if (!factories_->CreateTextures(count, |
| 279 size, |
| 280 &texture_ids, |
| 281 &texture_mailboxes, |
| 282 decoder_texture_target_)) { |
| 283 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); |
| 284 return; |
| 285 } |
| 286 DCHECK_EQ(count, texture_ids.size()); |
| 287 DCHECK_EQ(count, texture_mailboxes.size()); |
| 288 |
| 289 std::vector<media::PictureBuffer> picture_buffers; |
| 290 for (size_t i = 0; i < texture_ids.size(); ++i) { |
| 291 picture_buffers.push_back(media::PictureBuffer( |
| 292 next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i])); |
| 293 bool inserted = assigned_picture_buffers_.insert(std::make_pair( |
| 294 picture_buffers.back().id(), picture_buffers.back())).second; |
| 295 DCHECK(inserted); |
| 296 } |
| 297 vda_->AssignPictureBuffers(picture_buffers); |
| 298 } |
| 299 |
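| // Called when the VDA no longer needs a picture buffer. The texture is
| // deleted immediately unless the buffer is still on display, in which case
| // deletion is deferred until ReusePictureBuffer().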
| 300 void RTCVideoDecoder::DismissPictureBuffer(int32 id) { |
| 301 DVLOG(3) << "DismissPictureBuffer. id=" << id; |
| 302 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 303 |
| 304 std::map<int32, media::PictureBuffer>::iterator it = |
| 305 assigned_picture_buffers_.find(id); |
| 306 if (it == assigned_picture_buffers_.end()) { |
| 307 NOTREACHED() << "Missing picture buffer: " << id; |
| 308 return; |
| 309 } |
| 310 |
| 311 media::PictureBuffer buffer_to_dismiss = it->second; |
| 312 assigned_picture_buffers_.erase(it); |
| 313 |
| 314 std::set<int32>::iterator at_display_it = |
| 315 picture_buffers_at_display_.find(id); |
| 316 |
| 317 if (at_display_it == picture_buffers_at_display_.end()) { |
| 318 // We can delete the texture immediately as it's not being displayed. |
| 319 factories_->DeleteTexture(buffer_to_dismiss.texture_id()); |
| 320 } else { |
| 321 // Texture in display. Postpone deletion until after it's returned to us. |
| 322 bool inserted = dismissed_picture_buffers_ |
| 323 .insert(std::make_pair(id, buffer_to_dismiss)).second; |
| 324 DCHECK(inserted); |
| 325 } |
| 326 } |
| 327 |
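| // Called when the VDA has a decoded picture ready. Marks the picture buffer
| // as on display and delivers the frame to WebRTC through
| // |decode_complete_callback_|, unless the frame predates the last Reset.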
| 328 void RTCVideoDecoder::PictureReady(const media::Picture& picture) { |
| 329 DVLOG(3) << "PictureReady"; |
| 330 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 331 |
| 332 std::map<int32, media::PictureBuffer>::iterator it = |
| 333 assigned_picture_buffers_.find(picture.picture_buffer_id()); |
| 334 if (it == assigned_picture_buffers_.end()) { |
| 335 NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id(); |
| 336 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); |
| 337 return; |
| 338 } |
| 339 const media::PictureBuffer& pb = it->second; |
| 340 |
| 341 // Create a media::VideoFrame. |
| 342 uint32_t timestamp = 0, width = 0, height = 0; |
| 343 size_t size = 0; |
| 344 GetBufferData( |
| 345 picture.bitstream_buffer_id(), ×tamp, &width, &height, &size); |
| 346 scoped_refptr<media::VideoFrame> frame = |
| 347 CreateVideoFrame(picture, pb, timestamp, width, height, size); |
| 348 bool inserted = |
| 349 picture_buffers_at_display_.insert(picture.picture_buffer_id()).second; |
| 350 DCHECK(inserted); |
| 351 |
| 352 // Create a WebRTC video frame. |
| 353 // TODO(wuchengli): make media::VideoFrame an opaque native handle and put it |
| 354 // into WebRTC frame. |
| 355 webrtc::I420VideoFrame decoded_image; |
| 356 decoded_image.CreateEmptyFrame( |
| 357 width, height, width, (width + 1) / 2, (width + 1) / 2); |
| 358 decoded_image.set_timestamp(timestamp); |
| 359 |
| 360 // Invoke decode callback. WebRTC expects no frame callback after Release. |
| 361 { |
| 362 base::AutoLock auto_lock(lock_); |
| 363 DCHECK(decode_complete_callback_ != NULL); |
| 364 if (IsBufferAfterReset(picture.bitstream_buffer_id(), |
| 365 reset_bitstream_buffer_id_)) { |
| 366 decode_complete_callback_->Decoded(decoded_image); |
| 367 } |
| 368 } |
| 369 } |
| 370 |
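| // Wraps the decoded texture in a media::VideoFrame whose mailbox release
| // callback returns the picture buffer to the VDA via ReusePictureBuffer().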
| 371 scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame( |
| 372 const media::Picture& picture, |
| 373 const media::PictureBuffer& pb, |
| 374 uint32_t timestamp, |
| 375 uint32_t width, |
| 376 uint32_t height, |
| 377 size_t size) { |
| 378 gfx::Rect visible_rect(width, height); |
| 379 gfx::Size natural_size(width, height); |
| 380 DCHECK(decoder_texture_target_); |
| 381 // Convert the timestamp from the 90 kHz clock to a TimeDelta.
| 382 base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue( |
| 383 base::checked_numeric_cast<uint64_t>(timestamp) * 1000 / 90); |
| 384 return media::VideoFrame::WrapNativeTexture( |
| 385 new media::VideoFrame::MailboxHolder( |
| 386 pb.texture_mailbox(), |
| 387 0, // sync_point |
| 388 media::BindToCurrentLoop( |
| 389 base::Bind(&RTCVideoDecoder::ReusePictureBuffer, |
| 390 weak_this_, |
| 391 picture.picture_buffer_id()))), |
| 392 decoder_texture_target_, |
| 393 pb.size(), |
| 394 visible_rect, |
| 395 natural_size, |
| 396 timestamp_ms, |
| 397 base::Bind(&media::GpuVideoDecoder::Factories::ReadPixels, |
| 398 factories_, |
| 399 pb.texture_id(), |
| 400 decoder_texture_target_, |
| 401 natural_size), |
| 402 base::Closure()); |
| 403 } |
| 404 |
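| // Called when the VDA has consumed a bitstream buffer. Recycles its
| // shared-memory segment and tries to schedule more decode work.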
| 405 void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) { |
| 406 DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id; |
| 407 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 408 |
| 409 std::map<int32, SHMBuffer*>::iterator it = |
| 410 bitstream_buffers_in_decoder_.find(id); |
| 411 if (it == bitstream_buffers_in_decoder_.end()) { |
| 412 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); |
| 413 NOTREACHED() << "Missing bitstream buffer: " << id; |
| 414 return; |
| 415 } |
| 416 |
| 417 { |
| 418 base::AutoLock auto_lock(lock_); |
| 419 PutSHM_Locked(scoped_ptr<SHMBuffer>(it->second)); |
| 420 } |
| 421 bitstream_buffers_in_decoder_.erase(it); |
| 422 |
| 423 RequestBufferDecode(); |
| 424 } |
| 425 |
| 426 void RTCVideoDecoder::NotifyFlushDone() { |
| 427 DVLOG(3) << "NotifyFlushDone"; |
| 428 NOTREACHED() << "Unexpected flush done notification."; |
| 429 } |
| 430 |
| 431 void RTCVideoDecoder::NotifyResetDone() { |
| 432 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 433 DVLOG(3) << "NotifyResetDone"; |
| 434 |
| 435 if (!vda_) |
| 436 return; |
| 437 |
| 438 input_buffer_data_.clear(); |
| 439 { |
| 440 base::AutoLock auto_lock(lock_); |
| 441 state_ = INITIALIZED; |
| 442 } |
| 443 // Send the pending buffers for decoding. |
| 444 RequestBufferDecode(); |
| 445 } |
| 446 |
| 447 void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) { |
| 448 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 449 if (!vda_) |
| 450 return; |
| 451 |
| 452 LOG(ERROR) << "VDA Error:" << error; |
| 453 DestroyVDA(); |
| 454 |
| 455 base::AutoLock auto_lock(lock_); |
| 456 state_ = DECODE_ERROR; |
| 457 } |
| 458 |
| 459 void RTCVideoDecoder::WillDestroyCurrentMessageLoop() { |
| 460 DVLOG(2) << "WillDestroyCurrentMessageLoop"; |
| 461 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 462 factories_->Abort(); |
| 463 weak_factory_.InvalidateWeakPtrs(); |
| 464 DestroyVDA(); |
| 465 } |
| 466 |
| 467 void RTCVideoDecoder::Initialize(base::WaitableEvent* waiter) { |
| 468 DVLOG(2) << "Initialize"; |
| 469 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 470 base::MessageLoop::current()->AddDestructionObserver(this); |
| 471 waiter->Signal(); |
| 472 } |
| 473 |
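| // Moves pending buffers into the decode queue, then feeds queued buffers to
| // the VDA until kMaxInFlightDecodes decodes are outstanding. Runs on
| // |vda_loop_proxy_|.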
| 474 void RTCVideoDecoder::RequestBufferDecode() { |
| 475 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 476 if (!vda_) |
| 477 return; |
| 478 |
| 479 MovePendingBuffersToDecodeBuffers(); |
| 480 |
| 481 while (CanMoreDecodeWorkBeDone()) { |
| 482 // Get a buffer and data from the queue. |
| 483 SHMBuffer* shm_buffer = NULL; |
| 484 BufferData buffer_data; |
| 485 { |
| 486 base::AutoLock auto_lock(lock_); |
| 487 // Do not request decode if VDA is resetting. |
| 488 if (decode_buffers_.size() == 0 || state_ == RESETTING) |
| 489 return; |
| 490 shm_buffer = decode_buffers_.front().first; |
| 491 buffer_data = decode_buffers_.front().second; |
| 492 decode_buffers_.pop_front(); |
| 493 // Drop buffers queued before Reset or Release was called.
| 494 if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id, |
| 495 reset_bitstream_buffer_id_)) { |
| 496 PutSHM_Locked(scoped_ptr<SHMBuffer>(shm_buffer)); |
| 497 continue; |
| 498 } |
| 499 } |
| 500 |
| 501 // Create a BitstreamBuffer and send to VDA to decode. |
| 502 media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id, |
| 503 shm_buffer->shm->handle(), |
| 504 buffer_data.size); |
| 505 bool inserted = bitstream_buffers_in_decoder_ |
| 506 .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second; |
| 507 DCHECK(inserted); |
| 508 RecordBufferData(buffer_data); |
| 509 vda_->Decode(bitstream_buffer); |
| 510 } |
| 511 } |
| 512 |
| 513 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() { |
| 514 return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes; |
| 515 } |
| 516 |
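| // Returns true if |id_buffer| comes strictly after |id_reset| in the circular
| // id space [0, ID_LAST], i.e. it is at most ID_HALF - 1 steps ahead of it.
| // Always true when |id_reset| is ID_INVALID (no reset has happened).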
| 517 bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) { |
| 518 if (id_reset == ID_INVALID) |
| 519 return true; |
| 520 int32 diff = id_buffer - id_reset; |
| 521 if (diff <= 0) |
| 522 diff += ID_LAST + 1; |
| 523 return diff < ID_HALF; |
| 524 } |
| 525 |
| 526 void RTCVideoDecoder::SaveToDecodeBuffers_Locked( |
| 527 const webrtc::EncodedImage& input_image, |
| 528 scoped_ptr<SHMBuffer> shm_buffer, |
| 529 const BufferData& buffer_data) { |
| 530 memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length); |
| 531 std::pair<SHMBuffer*, BufferData> buffer_pair = |
| 532 std::make_pair(shm_buffer.release(), buffer_data); |
| 533 |
| 534 // Store the buffer and the metadata to the queue. |
| 535 decode_buffers_.push_back(buffer_pair); |
| 536 } |
| 537 |
| 538 bool RTCVideoDecoder::SaveToPendingBuffers_Locked( |
| 539 const webrtc::EncodedImage& input_image, |
| 540 const BufferData& buffer_data) { |
| 541 DVLOG(2) << "SaveToPendingBuffers_Locked" |
| 542 << ". pending_buffers size=" << pending_buffers_.size() |
| 543 << ". decode_buffers_ size=" << decode_buffers_.size() |
| 544 << ". available_shm size=" << available_shm_segments_.size(); |
| 545 // Too many buffers are queued; something has gone wrong.
| 546 if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) { |
| 547 LOG(WARNING) << "Too many pending buffers!"; |
| 548 return false; |
| 549 } |
| 550 |
| 551 // Clone the input image and save it to the queue. |
| 552 uint8_t* buffer = new uint8_t[input_image._length]; |
| 553 // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode() |
| 554 // interface to take a non-const ptr to the frame and add a method to the |
| 555 // frame that will swap buffers with another. |
| 556 memcpy(buffer, input_image._buffer, input_image._length); |
| 557 webrtc::EncodedImage encoded_image( |
| 558 buffer, input_image._length, input_image._length); |
| 559 std::pair<webrtc::EncodedImage, BufferData> buffer_pair = |
| 560 std::make_pair(encoded_image, buffer_data); |
| 561 |
| 562 pending_buffers_.push_back(buffer_pair); |
| 563 return true; |
| 564 } |
| 565 |
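| // Moves pending WebRTC buffers into |decode_buffers_|, copying each one into
| // a free shared-memory segment. Stops when no segment is available; buffers
| // queued before a Reset or Release are dropped.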
| 566 void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() { |
| 567 base::AutoLock auto_lock(lock_); |
| 568 while (pending_buffers_.size() > 0) { |
| 569 // Get a pending buffer from the queue. |
| 570 const webrtc::EncodedImage& input_image = pending_buffers_.front().first; |
| 571 const BufferData& buffer_data = pending_buffers_.front().second; |
| 572 |
| 573 // Drop the frame if it comes before Reset or Release. |
| 574 if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id, |
| 575 reset_bitstream_buffer_id_)) { |
| 576 delete[] input_image._buffer; |
| 577 pending_buffers_.pop_front(); |
| 578 continue; |
| 579 } |
| 580 // Get shared memory and save it to decode buffers. |
| 581 scoped_ptr<SHMBuffer> shm_buffer = GetSHM_Locked(input_image._length); |
| 582 if (!shm_buffer) |
| 583 return; |
| 584 SaveToDecodeBuffers_Locked(input_image, shm_buffer.Pass(), buffer_data); |
| 585 delete[] input_image._buffer; |
| 586 pending_buffers_.pop_front(); |
| 587 } |
| 588 } |
| 589 |
| 590 void RTCVideoDecoder::ResetInternal() { |
| 591 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 592 DVLOG(2) << "ResetInternal"; |
| 593 if (vda_) |
| 594 vda_->Reset(); |
| 595 } |
| 596 |
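| // Returns a picture buffer to the VDA once its texture is no longer in use,
| // or deletes the texture if the buffer was dismissed while it was on display.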
| 597 void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id, |
| 598 uint32 sync_point) { |
| 599 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 600 DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id; |
| 601 |
| 602 if (!vda_) |
| 603 return; |
| 604 |
| 605 CHECK(!picture_buffers_at_display_.empty()); |
| 606 |
| 607 size_t num_erased = picture_buffers_at_display_.erase(picture_buffer_id); |
| 608 DCHECK(num_erased); |
| 609 |
| 610 std::map<int32, media::PictureBuffer>::iterator it = |
| 611 assigned_picture_buffers_.find(picture_buffer_id); |
| 612 |
| 613 if (it == assigned_picture_buffers_.end()) { |
| 614 // This picture was dismissed while in display, so we postponed deletion. |
| 615 it = dismissed_picture_buffers_.find(picture_buffer_id); |
| 616 DCHECK(it != dismissed_picture_buffers_.end()); |
| 617 factories_->DeleteTexture(it->second.texture_id()); |
| 618 dismissed_picture_buffers_.erase(it); |
| 619 return; |
| 620 } |
| 621 |
| 622 factories_->WaitSyncPoint(sync_point); |
| 623 |
| 624 vda_->ReusePictureBuffer(picture_buffer_id); |
| 625 } |
| 626 |
| 627 void RTCVideoDecoder::DestroyTextures() { |
| 628 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 629 std::map<int32, media::PictureBuffer>::iterator it; |
| 630 |
| 631 for (it = assigned_picture_buffers_.begin(); |
| 632 it != assigned_picture_buffers_.end(); |
| 633 ++it) { |
| 634 factories_->DeleteTexture(it->second.texture_id()); |
| 635 } |
| 636 assigned_picture_buffers_.clear(); |
| 637 |
| 638 for (it = dismissed_picture_buffers_.begin(); |
| 639 it != dismissed_picture_buffers_.end(); |
| 640 ++it) { |
| 641 factories_->DeleteTexture(it->second.texture_id()); |
| 642 } |
| 643 dismissed_picture_buffers_.clear(); |
| 644 } |
| 645 |
| 646 void RTCVideoDecoder::DestroyVDA() { |
| 647 DVLOG(2) << "DestroyVDA"; |
| 648 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); |
| 649 if (vda_) |
| 650 vda_.release()->Destroy(); |
| 651 DestroyTextures(); |
| 652 } |
| 653 |
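| // Returns an available shared-memory segment of at least |min_size| bytes, or
| // NULL if none can be reused. May also schedule allocation of more segments
| // on |create_shm_thread_|.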
| 654 scoped_ptr<RTCVideoDecoder::SHMBuffer> RTCVideoDecoder::GetSHM_Locked( |
| 655 size_t min_size) { |
| 656 // Reuse a SHM if possible. |
| 657 SHMBuffer* ret = NULL; |
| 658 if (!available_shm_segments_.empty() && |
| 659 available_shm_segments_.back()->size >= min_size) { |
| 660 ret = available_shm_segments_.back(); |
| 661 available_shm_segments_.pop_back(); |
| 662 } |
| 663 // Post to |create_shm_thread_| to create shared memory if no segment can
| 664 // be reused or the queue is almost empty.
| 665 if (num_shm_buffers_ < kMaxNumSharedMemorySegments && |
| 666 (ret == NULL || available_shm_segments_.size() <= 1)) { |
| 667 create_shm_thread_.message_loop_proxy()->PostTask( |
| 668 FROM_HERE, |
| 669 // Unretained is safe because the destructor will wait until |
| 670 // |create_shm_thread_| stops. |
| 671 base::Bind( |
| 672 &RTCVideoDecoder::CreateSHM, base::Unretained(this), 1, min_size)); |
| 673 } |
| 674 return scoped_ptr<SHMBuffer>(ret); |
| 675 } |
| 676 |
| 677 void RTCVideoDecoder::PutSHM_Locked(scoped_ptr<SHMBuffer> shm_buffer) { |
| 678 available_shm_segments_.push_back(shm_buffer.release()); |
| 679 } |
| 680 |
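| // Allocates up to |number| shared-memory segments of at least |min_size|
| // bytes, capped so the total never exceeds kMaxNumSharedMemorySegments, and
| // kicks off decoding after each successful allocation. Runs on
| // |create_shm_thread_|.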
| 681 void RTCVideoDecoder::CreateSHM(int number, size_t min_size) { |
| 682 DCHECK(create_shm_thread_.message_loop_proxy()->BelongsToCurrentThread()); |
| 683 DVLOG(2) << "CreateSHM. size=" << min_size; |
| 684 int number_to_allocate; |
| 685 { |
| 686 base::AutoLock auto_lock(lock_); |
| 687 number_to_allocate = |
| 688 std::min(kMaxNumSharedMemorySegments - num_shm_buffers_, number); |
| 689 } |
| 690 size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes); |
| 691 for (int i = 0; i < number_to_allocate; i++) { |
| 692 base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate); |
| 693 if (shm != NULL) { |
| 694 base::AutoLock auto_lock(lock_); |
| 695 num_shm_buffers_++; |
| 696 PutSHM_Locked( |
| 697 scoped_ptr<SHMBuffer>(new SHMBuffer(shm, size_to_allocate))); |
| 698 // Kick off the decoding. |
| 699 vda_loop_proxy_->PostTask( |
| 700 FROM_HERE, |
| 701 base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_)); |
| 702 } |
| 703 } |
| 704 } |
| 705 |
| 706 void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) { |
| 707 input_buffer_data_.push_front(buffer_data); |
| 708 // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but |
| 709 // that's too small for some pathological B-frame test videos. The cost of |
| 710 // using too-high a value is low (192 bits per extra slot). |
| 711 static const size_t kMaxInputBufferDataSize = 128; |
| 712 // Pop from the back of the list, because that's the oldest entry and the
| 713 // least likely to be useful in the future.
| 714 if (input_buffer_data_.size() > kMaxInputBufferDataSize) |
| 715 input_buffer_data_.pop_back(); |
| 716 } |
| 717 |
| 718 void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id, |
| 719 uint32_t* timestamp, |
| 720 uint32_t* width, |
| 721 uint32_t* height, |
| 722 size_t* size) { |
| 723 for (std::list<BufferData>::iterator it = input_buffer_data_.begin(); |
| 724 it != input_buffer_data_.end(); |
| 725 ++it) { |
| 726 if (it->bitstream_buffer_id != bitstream_buffer_id) |
| 727 continue; |
| 728 *timestamp = it->timestamp; |
| 729 *width = it->width; |
| 730 *height = it->height; |
| 731 return; |
| 732 } |
| 733 NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id; |
| 734 } |
| 735 |
| 736 } // namespace content |