Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
#include "content/renderer/media/rtc_video_decoder.h"

#include <stdlib.h>
#include <string.h>

#include "base/bind.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/safe_numerics.h"
#include "base/stl_util.h"
#include "base/task_runner_util.h"
#include "content/child/child_thread.h"
#include "content/renderer/media/native_handle_impl.h"
#include "media/base/bind_to_loop.h"
#include "third_party/webrtc/common_video/interface/texture_video_frame.h"
#include "third_party/webrtc/system_wrappers/interface/ref_count.h"
| 19 | |
| 20 namespace content { | |
| 21 | |
// Bitstream buffer ids are assigned sequentially in [0, ID_LAST] and wrap
// around. ID_HALF is half of the id space; IsBufferAfterReset() uses it to
// order two ids modulo the wrap.
static const int32 ID_LAST = 0x3FFFFFFF;  // maximum bitstream buffer id
static const int32 ID_HALF = 0x20000000;

// Maximum number of concurrent VDA::Decode() operations RVD will maintain.
// Higher values allow better pipelining in the GPU, but also require more
// resources.
static const size_t kMaxInFlightDecodes = 8;

// Size of shared-memory segments we allocate. Since we reuse them we let them
// be on the beefy side.
static const size_t kSharedMemorySegmentBytes = 100 << 10;

// Maximum number of allocated shared-memory segments.
static const int kMaxNumSharedMemorySegments = 16;

// Maximum number of pending WebRTC buffers that are waiting for the shared
// memory. 10 seconds for 30 fps.
static const size_t kMaxNumOfPendingBuffers = 300;
|
wuchengli
2013/06/28 15:45:41
kMaxNumSharedMemorySegments and kMaxNumOfPendingBu
Ami GONE FROM CHROMIUM
2013/06/28 17:04:00
What sort of things can go wrong?
| |
| 40 | |
| 41 // A shared memory segment and its allocated size. This class has the ownership | |
| 42 // of |shm|. | |
| 43 class RTCVideoDecoder::SHMBuffer { | |
| 44 public: | |
| 45 SHMBuffer(base::SharedMemory* shm, size_t size); | |
| 46 ~SHMBuffer(); | |
| 47 base::SharedMemory* const shm; | |
| 48 const size_t size; | |
| 49 }; | |
| 50 | |
| 51 RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size) | |
| 52 : shm(shm), size(size) {} | |
| 53 | |
| 54 RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); } | |
| 55 | |
| 56 // Metadata of a bitstream buffer. | |
// Metadata of a bitstream buffer: the id assigned to it, its RTP timestamp,
// the frame dimensions, and the payload size. Recorded when a buffer is sent
// for decode and looked up again in PictureReady.
struct RTCVideoDecoder::BufferData {
  BufferData(int32 bitstream_buffer_id,
             uint32_t timestamp,
             int width,
             int height,
             size_t size);
  ~BufferData();
  int32 bitstream_buffer_id;
  uint32_t timestamp;  // in 90KHz
  uint32_t width;
  uint32_t height;
  size_t size;  // buffer size
};

RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
                                        uint32_t timestamp,
                                        int width,
                                        int height,
                                        size_t size)
    : bitstream_buffer_id(bitstream_buffer_id),
      timestamp(timestamp),
      width(width),
      height(height),
      size(size) {}

RTCVideoDecoder::BufferData::~BufferData() {}
| 83 | |
// Constructs the decoder and runs Initialize() on |vda_loop_proxy_|'s thread,
// blocking until initialization has completed.
RTCVideoDecoder::RTCVideoDecoder(
    const scoped_refptr<media::GpuVideoDecoder::Factories>& factories)
    : weak_factory_(this),
      factories_(factories),
      vda_loop_proxy_(factories_->GetMessageLoop()),
      main_loop_proxy_(base::MessageLoopProxy::current()),
      decoder_texture_target_(0),
      next_picture_buffer_id_(0),
      state_(UNINITIALIZED),
      decode_complete_callback_(NULL),
      num_shm_buffers_(0),
      next_bitstream_buffer_id_(0),
      reset_bitstream_buffer_id_(0) {
  // Initialize directly if |vda_loop_proxy_| is the renderer thread.
  base::WaitableEvent compositor_loop_async_waiter(false, false);
  if (vda_loop_proxy_->BelongsToCurrentThread()) {
    Initialize(&compositor_loop_async_waiter);
    return;
  }
  // Post the task if |vda_loop_proxy_| is the compositor thread. Waiting here
  // is safe because the compositor thread will not be stopped until the
  // renderer thread shuts down. base::Unretained is safe for the same reason:
  // |this| cannot be destroyed before the posted task runs, since we block on
  // the waiter below.
  vda_loop_proxy_->PostTask(FROM_HERE,
                            base::Bind(&RTCVideoDecoder::Initialize,
                                       base::Unretained(this),
                                       &compositor_loop_async_waiter));
  compositor_loop_async_waiter.Wait();
}
| 112 | |
| 113 RTCVideoDecoder::~RTCVideoDecoder() { | |
| 114 DVLOG(2) << "~RTCVideoDecoder"; | |
| 115 // Delete vda and remove |this| from the observer if vda thread is alive. | |
| 116 if (vda_loop_proxy_->BelongsToCurrentThread()) { | |
| 117 base::MessageLoop::current()->RemoveDestructionObserver(this); | |
| 118 DestroyVDA(); | |
| 119 } else { | |
| 120 // VDA should have been destroyed in WillDestroyCurrentMessageLoop. | |
| 121 DCHECK(!vda_); | |
| 122 } | |
| 123 | |
| 124 // Delete all shared memories. | |
| 125 STLDeleteElements(&available_shm_segments_); | |
| 126 STLDeleteValues(&bitstream_buffers_in_decoder_); | |
| 127 STLDeleteContainerPairFirstPointers(buffers_to_be_decoded_.begin(), | |
| 128 buffers_to_be_decoded_.end()); | |
| 129 buffers_to_be_decoded_.clear(); | |
| 130 | |
| 131 // Delete WebRTC input buffers. | |
| 132 for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it = | |
| 133 webrtc_buffers_.begin(); | |
| 134 it != webrtc_buffers_.end(); | |
| 135 ++it) { | |
| 136 delete it->first._buffer; | |
| 137 } | |
| 138 } | |
| 139 | |
// Factory method. Returns a NULL scoped_ptr when no hardware VP8 decoder is
// available on this platform; in that case the half-built decoder is deleted
// on the VDA message loop (its destructor expects to run there, see
// ~RTCVideoDecoder).
scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
    const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) {
  scoped_ptr<RTCVideoDecoder> decoder(new RTCVideoDecoder(factories));
  decoder->vda_.reset(factories->CreateVideoDecodeAccelerator(
      media::VP8PROFILE_MAIN, decoder.get()));
  // vda can be NULL if VP8 is not supported.
  if (decoder->vda_ != NULL) {
    decoder->state_ = INITIALIZED;
  } else {
    factories->GetMessageLoop()->DeleteSoon(FROM_HERE, decoder.release());
  }
  return decoder.Pass();
}
| 153 | |
int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
                                    int32_t /*numberOfCores*/) {
  DVLOG(2) << "InitDecode";
  DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8);
  // The hardware path does not implement VP8 feedback mode.
  if (codecSettings->codecSpecific.VP8.feedbackModeOn) {
    LOG(ERROR) << "Feedback mode not supported";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
    LOG(ERROR) << "VDA is not initialized. state=" << state_;
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  // Warm up the shared-memory pool: if no segment is available yet, ask the
  // child thread to allocate the first one so Decode() can proceed promptly.
  if (available_shm_segments_.size() == 0) {
    main_loop_proxy_->PostTask(FROM_HERE,
                               base::Bind(&RTCVideoDecoder::CreateSHM,
                                          weak_this_,
                                          kSharedMemorySegmentBytes));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
| 177 | |
int32_t RTCVideoDecoder::Decode(
    const webrtc::EncodedImage& inputImage,
    bool missingFrames,
    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
    int64_t /*renderTimeMs*/) {
  DVLOG(3) << "Decode";

  int bitstream_buffer_id;
  {
    base::AutoLock auto_lock(lock_);
    if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) {
      LOG(ERROR) << "The decoder has not initialized.";
      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    }
    if (state_ == DECODE_ERROR) {
      LOG(ERROR) << "Decoding error occurred.";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    bitstream_buffer_id = next_bitstream_buffer_id_;
    // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
    next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;
  }
  if (missingFrames || !inputImage._completeFrame) {
    DLOG(ERROR) << "Missing or incomplete frames.";
    // Unlike the SW decoder in libvpx, hw decoder cannot handle broken frames.
    // Return an error to request a key frame.
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  // Remember the dimensions from key frames so subsequent inter frames can
  // reuse the last known size.
  if (inputImage._frameType == webrtc::kKeyFrame)
    frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);

  // Create buffer metadata.
  BufferData buffer_data(bitstream_buffer_id,
                         inputImage._timeStamp,
                         frame_size_.width(),
                         frame_size_.height(),
                         inputImage._length);

  // The buffers should be sent to VDA in order. Send the pending buffers first.
  SendPendingBuffersForDecode();

  // If the shared memory is available and there are no pending buffers, send
  // the buffer for decode. If not, save the buffer in the queue for decode
  // later.
  SHMBuffer* shm_buffer = NULL;
  if (webrtc_buffers_.size() == 0)
    shm_buffer = GetSHM(inputImage._length);

  if (shm_buffer != NULL) {
    SendBufferForDecode(inputImage, shm_buffer, buffer_data);
    return WEBRTC_VIDEO_CODEC_OK;
  }

  return SaveToPendingBuffers(inputImage, buffer_data);
}
| 234 | |
int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* callback) {
  DVLOG(2) << "RegisterDecodeCompleteCallback";
  // |decode_complete_callback_| is read on the VDA thread in PictureReady,
  // hence the lock.
  base::AutoLock auto_lock(lock_);
  decode_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
| 242 | |
| 243 int32_t RTCVideoDecoder::Release() { | |
| 244 DVLOG(2) << "Release"; | |
| 245 // Do not destroy VDA because the decoder will be recycled by | |
| 246 // RTCVideoDecoderFactory. Just reset VDA. | |
| 247 return Reset(); | |
| 248 } | |
| 249 | |
int32_t RTCVideoDecoder::Reset() {
  DVLOG(2) << "Reset";
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED) {
    LOG(ERROR) << "Decoder not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  // Record the id boundary: buffers whose id precedes this point are dropped
  // rather than decoded or delivered (see IsBufferAfterReset).
  reset_bitstream_buffer_id_ = next_bitstream_buffer_id_;
  // If VDA is already resetting, no need to request the reset again.
  if (state_ != RESETTING) {
    state_ = RESETTING;
    vda_loop_proxy_->PostTask(
        FROM_HERE, base::Bind(&RTCVideoDecoder::ResetInternal, weak_this_));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
| 266 | |
| 267 void RTCVideoDecoder::NotifyInitializeDone() { | |
| 268 DVLOG(2) << "NotifyInitializeDone"; | |
| 269 NOTREACHED(); | |
| 270 } | |
| 271 | |
void RTCVideoDecoder::ProvidePictureBuffers(uint32 count,
                                            const gfx::Size& size,
                                            uint32 texture_target) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;

  if (!vda_)
    return;

  std::vector<uint32> texture_ids;
  std::vector<gpu::Mailbox> texture_mailboxes;
  decoder_texture_target_ = texture_target;
  // Discards the sync point returned here since PictureReady will imply that
  // the produce has already happened, and the texture is ready for use.
  if (!factories_->CreateTextures(count,
                                  size,
                                  &texture_ids,
                                  &texture_mailboxes,
                                  decoder_texture_target_)) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  DCHECK_EQ(count, texture_ids.size());
  DCHECK_EQ(count, texture_mailboxes.size());

  // Wrap each texture in a PictureBuffer, register it in
  // |assigned_picture_buffers_|, and hand the whole set to the VDA.
  std::vector<media::PictureBuffer> picture_buffers;
  for (size_t i = 0; i < texture_ids.size(); ++i) {
    picture_buffers.push_back(media::PictureBuffer(
        next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i]));
    bool inserted = assigned_picture_buffers_.insert(std::make_pair(
        picture_buffers.back().id(), picture_buffers.back())).second;
    DCHECK(inserted);
  }
  vda_->AssignPictureBuffers(picture_buffers);
}
| 307 | |
// The VDA no longer needs picture buffer |id|. Delete its texture now, or
// defer deletion if the buffer is still out at the display.
void RTCVideoDecoder::DismissPictureBuffer(int32 id) {
  DVLOG(3) << "DismissPictureBuffer. id=" << id;
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(id);
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << id;
    return;
  }

  media::PictureBuffer buffer_to_dismiss = it->second;
  assigned_picture_buffers_.erase(it);

  std::set<int32>::iterator at_display_it =
      picture_buffers_at_display_.find(id);

  if (at_display_it == picture_buffers_at_display_.end()) {
    // We can delete the texture immediately as it's not being displayed.
    factories_->DeleteTexture(buffer_to_dismiss.texture_id());
  } else {
    // Texture in display. Postpone deletion until after it's returned to us
    // in ReusePictureBuffer.
    bool inserted = dismissed_picture_buffers_
        .insert(std::make_pair(id, buffer_to_dismiss)).second;
    DCHECK(inserted);
  }
}
| 335 | |
// A decoded picture is ready: wrap it in a WebRTC texture frame and deliver
// it through |decode_complete_callback_|.
void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
  DVLOG(3) << "PictureReady";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture.picture_buffer_id());
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  const media::PictureBuffer& pb = it->second;

  // Create a media::VideoFrame, recovering the timestamp and dimensions that
  // were recorded for this bitstream buffer in RecordBufferData.
  uint32_t timestamp = 0, width = 0, height = 0;
  size_t size = 0;
  GetBufferData(
      picture.bitstream_buffer_id(), &timestamp, &width, &height, &size);
  scoped_refptr<media::VideoFrame> frame =
      CreateVideoFrame(picture, pb, timestamp, width, height, size);
  // Track that the buffer is now out at the display; its texture cannot be
  // deleted until ReusePictureBuffer returns it.
  bool inserted =
      picture_buffers_at_display_.insert(picture.picture_buffer_id()).second;
  DCHECK(inserted);

  // Create a WebRTC video frame wrapping the native frame handle.
  webrtc::RefCountImpl<NativeHandleImpl>* handle =
      new webrtc::RefCountImpl<NativeHandleImpl>();
  handle->SetHandle(frame.get());
  webrtc::TextureVideoFrame decoded_image(width, height, timestamp, 0, handle);

  // Invoke decode callback. WebRTC expects no frame callback after Release,
  // so frames from buffers older than the last Reset/Release are dropped.
  {
    base::AutoLock auto_lock(lock_);
    DCHECK(decode_complete_callback_ != NULL);
    if (IsBufferAfterReset(picture.bitstream_buffer_id(),
                           reset_bitstream_buffer_id_)) {
      decode_complete_callback_->Decoded(decoded_image);
    }
  }
}
| 376 | |
// Wraps the decoded texture in a media::VideoFrame. The frame's release
// callback (bound to the current loop) hands the picture buffer back to the
// VDA via ReusePictureBuffer.
scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
    const media::Picture& picture,
    const media::PictureBuffer& pb,
    uint32_t timestamp,
    uint32_t width,
    uint32_t height,
    size_t size) {
  gfx::Rect visible_rect(width, height);
  gfx::Size natural_size(width, height);
  DCHECK(decoder_texture_target_);
  // Convert timestamp from 90KHz to ms.
  base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
      base::checked_numeric_cast<uint64_t>(timestamp) * 1000 / 90);
  return media::VideoFrame::WrapNativeTexture(
      new media::VideoFrame::MailboxHolder(
          pb.texture_mailbox(),
          0,  // sync_point
          media::BindToCurrentLoop(
              base::Bind(&RTCVideoDecoder::ReusePictureBuffer,
                         weak_this_,
                         picture.picture_buffer_id()))),
      decoder_texture_target_,
      pb.size(),
      visible_rect,
      natural_size,
      timestamp_ms,
      base::Bind(&media::GpuVideoDecoder::Factories::ReadPixels,
                 factories_,
                 pb.texture_id(),
                 decoder_texture_target_,
                 natural_size),
      base::Closure());
}
| 410 | |
void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) {
  DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, SHMBuffer*>::iterator it =
      bitstream_buffers_in_decoder_.find(id);
  if (it == bitstream_buffers_in_decoder_.end()) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    NOTREACHED() << "Missing bitstream buffer: " << id;
    return;
  }

  // The VDA is done with this shared memory segment: return it to the reuse
  // pool and try to feed the next queued buffer to the decoder.
  PutSHM(it->second);
  bitstream_buffers_in_decoder_.erase(it);

  RequestBufferDecode();
}
| 428 | |
| 429 void RTCVideoDecoder::NotifyFlushDone() { | |
| 430 DVLOG(3) << "NotifyFlushDone"; | |
| 431 NOTREACHED() << "Unexpected flush done notification."; | |
| 432 } | |
| 433 | |
void RTCVideoDecoder::NotifyResetDone() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "NotifyResetDone";

  if (!vda_)
    return;

  // The recorded buffer metadata is stale after a reset.
  input_buffer_data_.clear();
  {
    base::AutoLock auto_lock(lock_);
    state_ = INITIALIZED;
  }
  // Send the pending buffers for decoding.
  RequestBufferDecode();
}
| 449 | |
void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  if (!vda_)
    return;

  DLOG(ERROR) << "VDA Error:" << error;
  DestroyVDA();

  // Mark the decoder as failed; Decode() reports the error back to WebRTC.
  base::AutoLock auto_lock(lock_);
  state_ = DECODE_ERROR;
}
| 461 | |
// The VDA thread is shutting down: abort outstanding factory work, invalidate
// weak pointers so no queued task runs on a dying object, and destroy the VDA
// while its thread still exists.
void RTCVideoDecoder::WillDestroyCurrentMessageLoop() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  factories_->Abort();
  weak_factory_.InvalidateWeakPtrs();
  DestroyVDA();
}
| 468 | |
// Runs on the VDA thread: registers for thread-shutdown notification and
// creates the weak pointer on the thread where it will be dereferenced, then
// unblocks the constructor via |waiter|.
void RTCVideoDecoder::Initialize(base::WaitableEvent* waiter) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  base::MessageLoop::current()->AddDestructionObserver(this);
  weak_this_ = weak_factory_.GetWeakPtr();
  waiter->Signal();
}
| 475 | |
| 476 void RTCVideoDecoder::RequestBufferDecode() { | |
| 477 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
| 478 if (!vda_) | |
| 479 return; | |
| 480 | |
| 481 while (CanMoreDecodeWorkBeDone()) { | |
| 482 // Get a buffer and data from the queue. | |
| 483 std::pair<SHMBuffer*, BufferData>* buffer_pair; | |
| 484 SHMBuffer* shm_buffer = NULL; | |
| 485 BufferData* buffer_data = NULL; | |
| 486 { | |
| 487 base::AutoLock auto_lock(lock_); | |
| 488 // Do not request decode if VDA is resetting. | |
| 489 if (buffers_to_be_decoded_.size() == 0 || state_ == RESETTING) | |
| 490 return; | |
| 491 buffer_pair = &buffers_to_be_decoded_.front(); | |
| 492 buffers_to_be_decoded_.pop_front(); | |
| 493 shm_buffer = buffer_pair->first; | |
| 494 buffer_data = &buffer_pair->second; | |
| 495 // Drop the buffers before Reset or Release is called. | |
| 496 if (!IsBufferAfterReset(buffer_data->bitstream_buffer_id, | |
| 497 reset_bitstream_buffer_id_)) { | |
| 498 available_shm_segments_.push_back(shm_buffer); | |
| 499 continue; | |
| 500 } | |
| 501 } | |
| 502 | |
| 503 // Create a BitstreamBuffer and send to VDA to decode. | |
| 504 media::BitstreamBuffer bitstream_buffer(buffer_data->bitstream_buffer_id, | |
| 505 shm_buffer->shm->handle(), | |
| 506 buffer_data->size); | |
| 507 bool inserted = bitstream_buffers_in_decoder_ | |
| 508 .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second; | |
| 509 DCHECK(inserted); | |
| 510 RecordBufferData(*buffer_data); | |
| 511 vda_->Decode(bitstream_buffer); | |
| 512 } | |
| 513 } | |
| 514 | |
| 515 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() { | |
| 516 return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes; | |
| 517 } | |
| 518 | |
| 519 bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) { | |
| 520 int32 diff = id_buffer - id_reset; | |
| 521 if (diff < 0) | |
| 522 diff += ID_LAST + 1; | |
| 523 return diff < ID_HALF; | |
| 524 } | |
| 525 | |
// Copies |input_image| into |shm_buffer|, queues it for the VDA, and pokes
// the VDA thread to pick it up.
void RTCVideoDecoder::SendBufferForDecode(
    const webrtc::EncodedImage& input_image,
    SHMBuffer* shm_buffer,
    const BufferData& buffer_data) {
  memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length);
  std::pair<SHMBuffer*, BufferData> buffer_pair =
      std::make_pair(shm_buffer, buffer_data);

  // Store the buffer and the metadata to the queue.
  {
    base::AutoLock auto_lock(lock_);
    buffers_to_be_decoded_.push_back(buffer_pair);
  }
  vda_loop_proxy_->PostTask(
      FROM_HERE, base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_));
}
| 542 | |
| 543 void RTCVideoDecoder::SendPendingBuffersForDecode() { | |
| 544 DVLOG(2) << "SendPendingBuffersForDecode"; | |
| 545 while (webrtc_buffers_.size() > 0) { | |
| 546 // Get a WebRTC buffer from the queue. | |
| 547 const std::pair<webrtc::EncodedImage, BufferData>& buffer_pair = | |
| 548 webrtc_buffers_.front(); | |
| 549 const webrtc::EncodedImage& input_image = buffer_pair.first; | |
| 550 const BufferData& buffer_data = buffer_pair.second; | |
| 551 | |
| 552 // Drop the frame if it comes before Reset or Release. | |
| 553 { | |
| 554 base::AutoLock auto_lock(lock_); | |
| 555 if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id, | |
| 556 reset_bitstream_buffer_id_)) { | |
| 557 webrtc_buffers_.pop_front(); | |
| 558 delete input_image._buffer; | |
| 559 continue; | |
| 560 } | |
| 561 } | |
| 562 | |
| 563 // Get shared memory and send it for decode. | |
| 564 SHMBuffer* shm_buffer = GetSHM(input_image._length); | |
| 565 if (!shm_buffer) | |
| 566 return; | |
| 567 SendBufferForDecode(input_image, shm_buffer, buffer_data); | |
| 568 webrtc_buffers_.pop_front(); | |
| 569 delete input_image._buffer; | |
| 570 } | |
| 571 } | |
| 572 | |
| 573 int RTCVideoDecoder::SaveToPendingBuffers( | |
| 574 const webrtc::EncodedImage& input_image, | |
| 575 const BufferData& buffer_data) { | |
| 576 DVLOG(2) << "SaveToPendingBuffers"; | |
| 577 // Queued too many buffers. Something goes wrong. | |
| 578 if (webrtc_buffers_.size() >= kMaxNumOfPendingBuffers) | |
| 579 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 580 | |
| 581 // Clone the input image and save it to the queue. | |
| 582 uint8_t* buffer = static_cast<uint8_t*>(malloc(input_image._length)); | |
| 583 memcpy(buffer, input_image._buffer, input_image._length); | |
| 584 webrtc::EncodedImage encoded_image( | |
| 585 buffer, input_image._length, input_image._length); | |
| 586 std::pair<webrtc::EncodedImage, BufferData> buffer_pair = | |
| 587 std::make_pair(encoded_image, buffer_data); | |
| 588 webrtc_buffers_.push_back(buffer_pair); | |
| 589 return WEBRTC_VIDEO_CODEC_OK; | |
| 590 } | |
| 591 | |
| 592 void RTCVideoDecoder::ResetInternal() { | |
| 593 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
| 594 DVLOG(2) << "ResetInternal"; | |
| 595 if (vda_) | |
| 596 vda_->Reset(); | |
| 597 } | |
| 598 | |
// The display has released a texture frame. Either return the picture buffer
// to the VDA for reuse, or — if it was dismissed while displayed — delete the
// texture that was kept alive for it.
void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id,
                                         uint32 sync_point) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;

  if (!vda_)
    return;

  CHECK(!picture_buffers_at_display_.empty());

  size_t num_erased = picture_buffers_at_display_.erase(picture_buffer_id);
  DCHECK(num_erased);

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture_buffer_id);

  if (it == assigned_picture_buffers_.end()) {
    // This picture was dismissed while in display, so we postponed deletion.
    it = dismissed_picture_buffers_.find(picture_buffer_id);
    DCHECK(it != dismissed_picture_buffers_.end());
    factories_->DeleteTexture(it->second.texture_id());
    dismissed_picture_buffers_.erase(it);
    return;
  }

  // Wait for the compositor to finish with the texture before the decoder
  // writes into it again.
  factories_->WaitSyncPoint(sync_point);

  vda_->ReusePictureBuffer(picture_buffer_id);
}
| 628 | |
| 629 void RTCVideoDecoder::DestroyTextures() { | |
| 630 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
| 631 std::map<int32, media::PictureBuffer>::iterator it; | |
| 632 | |
| 633 for (it = assigned_picture_buffers_.begin(); | |
| 634 it != assigned_picture_buffers_.end(); | |
| 635 ++it) { | |
| 636 factories_->DeleteTexture(it->second.texture_id()); | |
| 637 } | |
| 638 assigned_picture_buffers_.clear(); | |
| 639 | |
| 640 for (it = dismissed_picture_buffers_.begin(); | |
| 641 it != dismissed_picture_buffers_.end(); | |
| 642 ++it) { | |
| 643 factories_->DeleteTexture(it->second.texture_id()); | |
| 644 } | |
| 645 dismissed_picture_buffers_.clear(); | |
| 646 } | |
| 647 | |
void RTCVideoDecoder::DestroyVDA() {
  DVLOG(2) << "DestroyVDA";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  // Ownership is released before calling Destroy(): the VDA deletes itself
  // rather than being deleted through the scoped_ptr.
  if (vda_)
    vda_.release()->Destroy();
  DestroyTextures();
}
| 655 | |
// Takes a shared-memory segment of at least |min_size| bytes from the reuse
// pool, or returns NULL if none is available (callers then queue the frame
// and retry later). May also schedule allocation of an additional segment.
RTCVideoDecoder::SHMBuffer* RTCVideoDecoder::GetSHM(size_t min_size) {
  // Reuse a SHM if possible.
  SHMBuffer* ret = NULL;
  base::AutoLock auto_lock(lock_);
  if (!available_shm_segments_.empty() &&
      available_shm_segments_.back()->size >= min_size) {
    ret = available_shm_segments_.back();
    available_shm_segments_.pop_back();
  }
  // Post to the child thread to create shared memory if SHM cannot be reused
  // or the queue is almost empty.
  if (num_shm_buffers_ < kMaxNumSharedMemorySegments &&
      (ret == NULL || available_shm_segments_.size() <= 1)) {
    main_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM, weak_this_, min_size));
  }
  return ret;
}
| 675 | |
| 676 void RTCVideoDecoder::CreateSHM(size_t min_size) { | |
| 677 DCHECK(main_loop_proxy_->BelongsToCurrentThread()); | |
|
wuchengli
2013/07/02 10:34:25
I found this function is problematic because Creat
| |
| 678 DVLOG(2) << "CreateSharedMemory. size=" << min_size; | |
| 679 { | |
| 680 base::AutoLock auto_lock(lock_); | |
| 681 if (num_shm_buffers_ >= kMaxNumSharedMemorySegments) | |
| 682 return; | |
| 683 } | |
| 684 size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes); | |
| 685 base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate); | |
| 686 if (shm != NULL) { | |
| 687 { | |
| 688 base::AutoLock auto_lock(lock_); | |
| 689 num_shm_buffers_++; | |
| 690 } | |
| 691 PutSHM(new SHMBuffer(shm, size_to_allocate)); | |
| 692 } | |
| 693 } | |
| 694 | |
| 695 void RTCVideoDecoder::PutSHM(SHMBuffer* shm_buffer) { | |
| 696 base::AutoLock auto_lock(lock_); | |
| 697 available_shm_segments_.push_back(shm_buffer); | |
| 698 } | |
| 699 | |
| 700 void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) { | |
| 701 input_buffer_data_.push_front(buffer_data); | |
| 702 // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but | |
| 703 // that's too small for some pathological B-frame test videos. The cost of | |
| 704 // using too-high a value is low (192 bits per extra slot). | |
| 705 static const size_t kMaxInputBufferDataSize = 128; | |
| 706 // Pop from the back of the list, because that's the oldest and least likely | |
| 707 // to be useful in the future data. | |
| 708 if (input_buffer_data_.size() > kMaxInputBufferDataSize) | |
| 709 input_buffer_data_.pop_back(); | |
| 710 } | |
| 711 | |
| 712 void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id, | |
| 713 uint32_t* timestamp, | |
| 714 uint32_t* width, | |
| 715 uint32_t* height, | |
| 716 size_t* size) { | |
| 717 for (std::list<BufferData>::iterator it = input_buffer_data_.begin(); | |
| 718 it != input_buffer_data_.end(); | |
| 719 ++it) { | |
| 720 if (it->bitstream_buffer_id != bitstream_buffer_id) | |
| 721 continue; | |
| 722 *timestamp = it->timestamp; | |
| 723 *width = it->width; | |
| 724 *height = it->height; | |
| 725 return; | |
| 726 } | |
| 727 NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id; | |
| 728 } | |
| 729 | |
| 730 } // namespace content | |
| OLD | NEW |