1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/renderer/media/rtc_video_decoder.h" | |
6 | |
7 #include "base/bind.h" | |
8 #include "base/logging.h" | |
9 #include "base/memory/ref_counted.h" | |
10 #include "base/message_loop_proxy.h" | |
11 #include "base/safe_numerics.h" | |
12 #include "base/task_runner_util.h" | |
13 #include "content/renderer/media/native_handle_impl.h" | |
14 #include "media/base/bind_to_loop.h" | |
15 #include "third_party/webrtc/system_wrappers/interface/ref_count.h" | |
16 | |
17 namespace content { | |
18 | |
19 // A shared memory segment and its allocated size. |shm| is not owned by | |
20 // this struct; its users are responsible for closing and deleting it. | |
21 struct RTCVideoDecoder::SHMBuffer { | |
22 SHMBuffer(base::SharedMemory* shm, size_t size); | |
23 ~SHMBuffer(); | |
24 base::SharedMemory* const shm; | |
25 const size_t size; | |
26 }; | |
27 | |
28 RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size) | |
29 : shm(shm), size(size) {} | |
30 | |
31 RTCVideoDecoder::SHMBuffer::~SHMBuffer() {} | |
32 | |
33 // Metadata of a bitstream buffer. | |
34 struct RTCVideoDecoder::BufferData { | |
35 BufferData(int32 bitstream_buffer_id, | |
36 uint32_t timestamp, | |
37 int width, | |
38 int height, | |
39 size_t size); | |
40 ~BufferData(); | |
41 int32 bitstream_buffer_id; | |
42 uint32_t timestamp; // in 90 kHz units | |
43 uint32_t width; | |
44 uint32_t height; | |
45 size_t size; // buffer size | |
46 }; | |
47 | |
48 RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id, | |
49 uint32_t timestamp, | |
50 int width, | |
51 int height, | |
52 size_t size) | |
53 : bitstream_buffer_id(bitstream_buffer_id), | |
54 timestamp(timestamp), | |
55 width(width), | |
56 height(height), | |
57 size(size) {} | |
58 | |
59 RTCVideoDecoder::BufferData::~BufferData() {} | |
60 | |
61 RTCVideoDecoder::RTCVideoDecoder( | |
62 const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) | |
63 : state_(UNINITIALIZED), | |
64 decode_complete_callback_(NULL), | |
65 weak_factory_(this), | |
66 factories_(factories), | |
67 vda_loop_proxy_(factories_->GetMessageLoop()), | |
68 decoder_texture_target_(0), | |
69 next_picture_buffer_id_(0), | |
70 next_bitstream_buffer_id_(0) { | |
71 // Initialize directly if |vda_loop_proxy_| is the renderer thread. | |
72 base::WaitableEvent compositor_loop_async_waiter(false, false); | |
73 if (vda_loop_proxy_->BelongsToCurrentThread()) { | |
74 Initialize(&compositor_loop_async_waiter); | |
75 return; | |
76 } | |
77 // Post the task if |vda_loop_proxy_| is the compositor thread. Waiting here | |
78 // is safe because the compositor thread will not be stopped until the | |
79 // renderer thread shuts down. | |
80 vda_loop_proxy_->PostTask(FROM_HERE, | |
81 base::Bind(&RTCVideoDecoder::Initialize, | |
82 base::Unretained(this), | |
83 &compositor_loop_async_waiter)); | |
84 compositor_loop_async_waiter.Wait(); | |
85 } | |
86 | |
87 RTCVideoDecoder::~RTCVideoDecoder() { | |
88 DVLOG(2) << "~RTCVideoDecoder"; | |
89 // Destroy the VDA and remove |this| as a destruction observer if the VDA thread is alive. | |
90 if (vda_loop_proxy_->BelongsToCurrentThread()) { | |
91 base::MessageLoop::current()->RemoveDestructionObserver(this); | |
92 DestroyVDA(); | |
93 } else { | |
94 // VDA should have been destroyed in WillDestroyCurrentMessageLoop. | |
95 DCHECK(!vda_); | |
96 } | |
97 | |
98 // Delete all shared memories. | |
99 for (size_t i = 0; i < available_shm_segments_.size(); ++i) { | |
100 available_shm_segments_[i]->shm->Close(); | |
101 delete available_shm_segments_[i]; | |
102 } | |
103 available_shm_segments_.clear(); | |
104 for (std::map<int32, SHMBuffer*>::iterator it = | |
105 bitstream_buffers_in_decoder_.begin(); | |
106 it != bitstream_buffers_in_decoder_.end(); | |
107 ++it) { | |
108 it->second->shm->Close(); | |
109 delete it->second; | |
110 } | |
111 bitstream_buffers_in_decoder_.clear(); | |
112 ClearBufferQueue(&buffers_to_be_decoded_); | |
113 ClearBufferQueue(&buffers_delayed_); | |
114 } | |
115 | |
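 // Creates a decoder backed by a hardware VideoDecodeAccelerator (VDA). | |
 // Returns NULL if VP8 hardware decoding is unavailable; in that case the | |
 // partially constructed decoder is deleted on the VDA message loop. | |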
116 scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create( | |
117 const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) { | |
118 scoped_ptr<RTCVideoDecoder> decoder(new RTCVideoDecoder(factories)); | |
119 decoder->vda_.reset(factories->CreateVideoDecodeAccelerator( | |
120 media::VP8PROFILE_MAIN, decoder.get())); | |
121 // |vda_| can be NULL if VP8 hardware decoding is not supported. | |
122 if (decoder->vda_ != NULL) { | |
123 decoder->state_ = INITIALIZED; | |
124 } else { | |
125 factories->GetMessageLoop()->DeleteSoon(FROM_HERE, decoder.release()); | |
126 } | |
127 return decoder.Pass(); | |
128 } | |
129 | |
130 int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings, | |
131 int32_t /*numberOfCores*/) { | |
132 DVLOG(2) << "InitDecode"; | |
133 DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8); | |
134 if (codecSettings->codecSpecific.VP8.feedbackModeOn) { | |
135 LOG(ERROR) << "Feedback mode not supported"; | |
136 return WEBRTC_VIDEO_CODEC_ERROR; | |
137 } | |
138 | |
139 base::AutoLock auto_lock(lock_); | |
140 if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) { | |
141 LOG(ERROR) << "VDA is not initialized. state=" << state_; | |
142 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
143 } | |
144 return WEBRTC_VIDEO_CODEC_OK; | |
145 } | |
146 | |
147 int32_t RTCVideoDecoder::Decode( | |
148 const webrtc::EncodedImage& inputImage, | |
149 bool missingFrames, | |
150 const webrtc::RTPFragmentationHeader* /*fragmentation*/, | |
151 const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/, | |
152 int64_t /*renderTimeMs*/) { | |
153 DVLOG(3) << "Decode"; | |
154 | |
155 { | |
156 base::AutoLock auto_lock(lock_); | |
157 if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) { | |
158 LOG(ERROR) << "The decoder has not been initialized."; | |
159 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
160 } | |
161 if (state_ == DECODE_ERROR) { | |
162 LOG(ERROR) << "Decoding error occurred."; | |
163 return WEBRTC_VIDEO_CODEC_ERROR; | |
164 } | |
165 } | |
166 if (missingFrames || !inputImage._completeFrame) { | |
167 DLOG(ERROR) << "Missing or incomplete frames."; | |
168 // Unlike the SW decoder in libvpx, the HW decoder cannot handle broken frames. | |
169 // Return an error to request a key frame. | |
170 return WEBRTC_VIDEO_CODEC_ERROR; | |
171 } | |
172 | |
173 if (inputImage._frameType == webrtc::kKeyFrame) | |
174 frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight); | |
175 | |
176 // Copy WebRTC buffer to SHM buffer and create buffer metadata. | |
177 SHMBuffer* shm_buffer = GetSHM(inputImage._length); | |
178 if (!shm_buffer) | |
179 return WEBRTC_VIDEO_CODEC_ERROR; | |
180 memcpy(shm_buffer->shm->memory(), inputImage._buffer, inputImage._length); | |
181 BufferData buffer_data(next_bitstream_buffer_id_, | |
182 inputImage._timeStamp, | |
183 frame_size_.width(), | |
184 frame_size_.height(), | |
185 inputImage._length); | |
186 // Mask against 30 bits to avoid (undefined) wraparound of a signed integer. | |
187 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF; | |
188 std::pair<SHMBuffer*, BufferData> buffer_pair = | |
189 std::make_pair(shm_buffer, buffer_data); | |
190 | |
191 // Store the buffer and the metadata to the queue. | |
192 base::AutoLock auto_lock(lock_); | |
193 if (state_ == RESETTING) { | |
194 // If VDA is resetting, save the buffer but do not request decode. | |
195 buffers_delayed_.push_back(buffer_pair); | |
196 } else { | |
197 buffers_to_be_decoded_.push_back(buffer_pair); | |
198 vda_loop_proxy_->PostTask( | |
199 FROM_HERE, | |
200 base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_)); | |
201 } | |
202 return WEBRTC_VIDEO_CODEC_OK; | |
203 } | |
204 | |
205 int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback( | |
206 webrtc::DecodedImageCallback* callback) { | |
207 DVLOG(2) << "RegisterDecodeCompleteCallback"; | |
208 base::AutoLock auto_lock(lock_); | |
209 decode_complete_callback_ = callback; | |
210 return WEBRTC_VIDEO_CODEC_OK; | |
211 } | |
212 | |
213 int32_t RTCVideoDecoder::Release() { | |
214 DVLOG(2) << "Release"; | |
215 // Do not destroy VDA because the decoder will be recycled by | |
216 // RTCVideoDecoderFactory. Just reset VDA. | |
217 return Reset(); | |
218 } | |
219 | |
220 int32_t RTCVideoDecoder::Reset() { | |
221 DVLOG(2) << "Reset"; | |
222 base::AutoLock auto_lock(lock_); | |
223 if (state_ == UNINITIALIZED) { | |
224 LOG(ERROR) << "Decoder not initialized."; | |
225 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
226 } else if (state_ == RESETTING) { | |
227 // If the VDA is already resetting, move the delayed buffers to the decode | |
228 // queue; there is no need to request the reset again. | |
229 buffers_to_be_decoded_.insert(buffers_to_be_decoded_.end(), | |
230 buffers_delayed_.begin(), | |
231 buffers_delayed_.end()); | |
232 buffers_delayed_.clear(); | |
233 } else { | |
234 state_ = RESETTING; | |
235 vda_loop_proxy_->PostTask( | |
236 FROM_HERE, base::Bind(&RTCVideoDecoder::ResetInternal, weak_this_)); | |
237 } | |
238 return WEBRTC_VIDEO_CODEC_OK; | |
239 } | |
240 | |
241 void RTCVideoDecoder::NotifyInitializeDone() { | |
242 DVLOG(2) << "NotifyInitializeDone"; | |
243 NOTREACHED(); | |
244 } | |
245 | |
246 void RTCVideoDecoder::ProvidePictureBuffers(uint32 count, | |
247 const gfx::Size& size, | |
248 uint32 texture_target) { | |
249 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
250 DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target; | |
251 | |
252 if (!vda_) | |
253 return; | |
254 | |
255 std::vector<uint32> texture_ids; | |
256 decoder_texture_target_ = texture_target; | |
257 if (!factories_->CreateTextures( | |
258 count, size, &texture_ids, decoder_texture_target_)) { | |
259 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
260 return; | |
261 } | |
262 DCHECK_EQ(count, texture_ids.size()); | |
263 | |
264 std::vector<media::PictureBuffer> picture_buffers; | |
265 for (size_t i = 0; i < texture_ids.size(); ++i) { | |
266 picture_buffers.push_back( | |
267 media::PictureBuffer(next_picture_buffer_id_++, size, texture_ids[i])); | |
268 bool inserted = assigned_picture_buffers_.insert(std::make_pair( | |
269 picture_buffers.back().id(), picture_buffers.back())).second; | |
270 DCHECK(inserted); | |
271 } | |
272 vda_->AssignPictureBuffers(picture_buffers); | |
273 } | |
274 | |
275 void RTCVideoDecoder::DismissPictureBuffer(int32 id) { | |
276 DVLOG(3) << "DismissPictureBuffer. id=" << id; | |
277 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
278 | |
279 std::map<int32, media::PictureBuffer>::iterator it = | |
280 assigned_picture_buffers_.find(id); | |
281 if (it == assigned_picture_buffers_.end()) { | |
282 NOTREACHED() << "Missing picture buffer: " << id; | |
283 return; | |
284 } | |
285 | |
286 media::PictureBuffer buffer_to_dismiss = it->second; | |
287 assigned_picture_buffers_.erase(it); | |
288 | |
289 std::set<int32>::iterator at_display_it = | |
290 picture_buffers_at_display_.find(id); | |
291 | |
292 if (at_display_it == picture_buffers_at_display_.end()) { | |
293 // We can delete the texture immediately as it's not being displayed. | |
294 factories_->DeleteTexture(buffer_to_dismiss.texture_id()); | |
295 } else { | |
296 // Texture in display. Postpone deletion until after it's returned to us. | |
297 bool inserted = dismissed_picture_buffers_ | |
298 .insert(std::make_pair(id, buffer_to_dismiss)).second; | |
299 DCHECK(inserted); | |
300 } | |
301 } | |
302 | |
303 void RTCVideoDecoder::PictureReady(const media::Picture& picture) { | |
304 DVLOG(3) << "PictureReady"; | |
305 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
306 | |
307 std::map<int32, media::PictureBuffer>::iterator it = | |
308 assigned_picture_buffers_.find(picture.picture_buffer_id()); | |
309 if (it == assigned_picture_buffers_.end()) { | |
310 NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id(); | |
311 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
312 return; | |
313 } | |
314 const media::PictureBuffer& pb = it->second; | |
315 | |
316 // Create a media::VideoFrame. | |
317 uint32_t timestamp = 0, width = 0, height = 0; | |
318 size_t size = 0; | |
319 GetBufferData( | |
320 picture.bitstream_buffer_id(), ×tamp, &width, &height, &size); | |
321 scoped_refptr<media::VideoFrame> frame = | |
322 CreateVideoFrame(picture, pb, timestamp, width, height, size); | |
323 bool inserted = | |
324 picture_buffers_at_display_.insert(picture.picture_buffer_id()).second; | |
325 DCHECK(inserted); | |
326 { | |
327 // WebRTC expects no frame callback after Release. Drop the frame if VDA is | |
328 // resetting. | |
329 base::AutoLock auto_lock(lock_); | |
330 if (state_ == RESETTING) | |
331 return; | |
wuchengli
2013/06/20 07:27:04
This fixes an occasional crash during hang-up.
| |
332 } | |
333 | |
334 // Create a webrtc::I420VideoFrame. | |
335 webrtc::I420VideoFrame decoded_image; | |
336 // TODO(wuchengli): remove the malloc. | |
337 decoded_image.CreateEmptyFrame(width, height, width, width / 2, width / 2); | |
338 webrtc::RefCountImpl<NativeHandleImpl>* handle = | |
339 new webrtc::RefCountImpl<NativeHandleImpl>(); | |
340 handle->SetHandle(frame.get()); | |
341 decoded_image.set_native_handle(handle); | |
342 decoded_image.set_timestamp(timestamp); | |
343 | |
344 // Send to decode callback. | |
345 webrtc::DecodedImageCallback* callback; | |
346 { | |
347 base::AutoLock auto_lock(lock_); | |
348 callback = decode_complete_callback_; | |
349 } | |
350 DCHECK(callback != NULL); | |
351 callback->Decoded(decoded_image); | |
352 } | |
353 | |
354 scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame( | |
355 const media::Picture& picture, | |
356 const media::PictureBuffer& pb, | |
357 uint32_t timestamp, | |
358 uint32_t width, | |
359 uint32_t height, | |
360 size_t size) { | |
361 gfx::Rect visible_rect(width, height); | |
362 gfx::Size natural_size(width, height); | |
363 DCHECK(decoder_texture_target_); | |
364 // Convert the timestamp from 90 kHz ticks to microseconds (TimeDelta's internal unit). | |
365 base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue( | |
366 base::checked_numeric_cast<uint64_t>(timestamp) * 1000 / 90); | |
367 return media::VideoFrame::WrapNativeTexture( | |
368 pb.texture_id(), | |
369 decoder_texture_target_, | |
370 pb.size(), | |
371 visible_rect, | |
372 natural_size, | |
373 timestamp_ms, | |
374 base::Bind(&media::GpuVideoDecoder::Factories::ReadPixels, | |
375 factories_, | |
376 pb.texture_id(), | |
377 decoder_texture_target_, | |
378 natural_size), | |
379 media::BindToCurrentLoop(base::Bind(&RTCVideoDecoder::ReusePictureBuffer, | |
380 weak_this_, | |
381 picture.picture_buffer_id()))); | |
382 } | |
383 | |
384 void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) { | |
385 DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id; | |
386 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
387 | |
388 std::map<int32, SHMBuffer*>::iterator it = | |
389 bitstream_buffers_in_decoder_.find(id); | |
390 if (it == bitstream_buffers_in_decoder_.end()) { | |
391 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
392 NOTREACHED() << "Missing bitstream buffer: " << id; | |
393 return; | |
394 } | |
395 | |
396 PutSHM(it->second); | |
397 bitstream_buffers_in_decoder_.erase(it); | |
398 | |
399 RequestBufferDecode(); | |
400 } | |
401 | |
402 void RTCVideoDecoder::NotifyFlushDone() { | |
403 DVLOG(3) << "NotifyFlushDone"; | |
404 NOTREACHED() << "Unexpected flush done notification."; | |
405 } | |
406 | |
407 void RTCVideoDecoder::NotifyResetDone() { | |
408 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
409 DVLOG(3) << "NotifyResetDone"; | |
410 | |
411 if (!vda_) | |
412 return; | |
413 | |
414 input_buffer_data_.clear(); | |
415 int num_buffers = 0; | |
416 { | |
417 base::AutoLock auto_lock(lock_); | |
418 // Clear and recycle the old buffers. | |
419 for (std::deque<std::pair<SHMBuffer*, BufferData> >::const_iterator it = | |
420 buffers_to_be_decoded_.begin(); | |
421 it != buffers_to_be_decoded_.end(); | |
422 it++) { | |
423 PutSHM(it->first); | |
424 } | |
425 buffers_to_be_decoded_.clear(); | |
426 | |
427 std::swap(buffers_to_be_decoded_, buffers_delayed_); | |
428 num_buffers = buffers_to_be_decoded_.size(); | |
429 state_ = INITIALIZED; | |
430 } | |
431 // Send the pending buffers for decoding. | |
432 for (int i = 0; i < num_buffers; i++) | |
433 RequestBufferDecode(); | |
434 } | |
435 | |
436 void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) { | |
437 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
438 if (!vda_) | |
439 return; | |
440 | |
441 DLOG(ERROR) << "VDA Error: " << error; | |
442 DestroyVDA(); | |
443 | |
444 base::AutoLock auto_lock(lock_); | |
445 state_ = DECODE_ERROR; | |
446 } | |
447 | |
448 void RTCVideoDecoder::WillDestroyCurrentMessageLoop() { | |
449 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
450 factories_->Abort(); | |
451 weak_factory_.InvalidateWeakPtrs(); | |
452 DestroyVDA(); | |
453 } | |
454 | |
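 // Runs on the VDA message loop. Registers |this| as a destruction observer | |
 // of that loop, creates the weak pointer used for posted tasks, and signals | |
 // |waiter| so that the constructor can stop waiting. | |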
455 void RTCVideoDecoder::Initialize(base::WaitableEvent* waiter) { | |
456 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
457 base::MessageLoop::current()->AddDestructionObserver(this); | |
458 weak_this_ = weak_factory_.GetWeakPtr(); | |
459 waiter->Signal(); | |
460 } | |
461 | |
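 // Takes the next buffer from the decode queue and submits it to the VDA. | |
 // Does nothing if the VDA has been destroyed, too many decodes are already | |
 // in flight, the queue is empty, or a reset is in progress. | |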
462 void RTCVideoDecoder::RequestBufferDecode() { | |
463 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
464 if (!CanMoreDecodeWorkBeDone() || !vda_) | |
465 return; | |
466 | |
467 // Get a buffer and data from the queue. | |
468 std::pair<SHMBuffer*, BufferData>* buffer_pair; | |
469 { | |
470 base::AutoLock auto_lock(lock_); | |
471 if (buffers_to_be_decoded_.size() == 0 || state_ == RESETTING) | |
472 return; | |
473 buffer_pair = &buffers_to_be_decoded_.front(); | |
474 buffers_to_be_decoded_.pop_front(); | |
475 } | |
476 SHMBuffer* shm_buffer = buffer_pair->first; | |
477 BufferData buffer_data = buffer_pair->second; | |
478 | |
479 // Create a BitstreamBuffer and send to VDA to decode. | |
480 media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id, | |
481 shm_buffer->shm->handle(), | |
482 buffer_data.size); | |
483 bool inserted = bitstream_buffers_in_decoder_ | |
484 .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second; | |
485 DCHECK(inserted); | |
486 RecordBufferData(buffer_data); | |
487 vda_->Decode(bitstream_buffer); | |
488 } | |
489 | |
490 // Maximum number of concurrent VDA::Decode() operations RVD will maintain. | |
491 // Higher values allow better pipelining in the GPU, but also require more | |
492 // resources. | |
493 enum { | |
494 kMaxInFlightDecodes = 8 | |
495 }; | |
496 | |
497 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() { | |
498 return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes; | |
499 } | |
500 | |
501 void RTCVideoDecoder::ResetInternal() { | |
502 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
503 DVLOG(2) << "ResetInternal"; | |
504 if (vda_) | |
505 vda_->Reset(); | |
506 } | |
507 | |
508 void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id) { | |
509 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
510 DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id; | |
511 | |
512 if (!vda_) | |
513 return; | |
514 | |
515 CHECK(!picture_buffers_at_display_.empty()); | |
516 | |
517 size_t num_erased = picture_buffers_at_display_.erase(picture_buffer_id); | |
518 DCHECK(num_erased); | |
519 | |
520 std::map<int32, media::PictureBuffer>::iterator it = | |
521 assigned_picture_buffers_.find(picture_buffer_id); | |
522 | |
523 if (it == assigned_picture_buffers_.end()) { | |
524 // This picture was dismissed while in display, so we postponed deletion. | |
525 it = dismissed_picture_buffers_.find(picture_buffer_id); | |
526 DCHECK(it != dismissed_picture_buffers_.end()); | |
527 factories_->DeleteTexture(it->second.texture_id()); | |
528 dismissed_picture_buffers_.erase(it); | |
529 return; | |
530 } | |
531 | |
532 vda_->ReusePictureBuffer(picture_buffer_id); | |
533 } | |
534 | |
535 void RTCVideoDecoder::DestroyTextures() { | |
536 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
537 std::map<int32, media::PictureBuffer>::iterator it; | |
538 | |
539 for (it = assigned_picture_buffers_.begin(); | |
540 it != assigned_picture_buffers_.end(); | |
541 ++it) { | |
542 factories_->DeleteTexture(it->second.texture_id()); | |
543 } | |
544 assigned_picture_buffers_.clear(); | |
545 | |
546 for (it = dismissed_picture_buffers_.begin(); | |
547 it != dismissed_picture_buffers_.end(); | |
548 ++it) { | |
549 factories_->DeleteTexture(it->second.texture_id()); | |
550 } | |
551 dismissed_picture_buffers_.clear(); | |
552 } | |
553 | |
554 void RTCVideoDecoder::DestroyVDA() { | |
555 DVLOG(2) << "DestroyVDA"; | |
556 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
557 if (vda_) | |
558 vda_.release()->Destroy(); | |
559 DestroyTextures(); | |
560 } | |
561 | |
562 void RTCVideoDecoder::ClearBufferQueue( | |
563 std::deque<std::pair<SHMBuffer*, BufferData> >* queue) { | |
564 for (std::deque<std::pair<SHMBuffer*, BufferData> >::iterator it = | |
565 queue->begin(); | |
566 it != queue->end(); | |
567 ++it) { | |
568 it->first->shm->Close(); | |
569 delete it->first; | |
570 } | |
571 queue->clear(); | |
572 } | |
573 | |
574 // Size of shared-memory segments we allocate. Since we reuse them we let them | |
575 // be on the beefy side. | |
576 static const size_t kSharedMemorySegmentBytes = 100 << 10; | |
577 | |
578 RTCVideoDecoder::SHMBuffer* RTCVideoDecoder::GetSHM(size_t min_size) { | |
579 { | |
580 // Reuse a SHM if possible. | |
581 base::AutoLock auto_lock(lock_); | |
582 if (!available_shm_segments_.empty() && | |
583 available_shm_segments_.back()->size >= min_size) { | |
584 SHMBuffer* ret = available_shm_segments_.back(); | |
585 available_shm_segments_.pop_back(); | |
586 return ret; | |
587 } | |
588 } | |
589 // Create a new shared memory segment. This is done on the main thread. | |
590 size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes); | |
591 base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate); | |
592 if (!shm) | |
593 return NULL; | |
594 return new SHMBuffer(shm, size_to_allocate); | |
595 } | |
596 | |
597 void RTCVideoDecoder::PutSHM(SHMBuffer* shm_buffer) { | |
598 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
599 base::AutoLock auto_lock(lock_); | |
600 available_shm_segments_.push_back(shm_buffer); | |
601 } | |
602 | |
603 void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) { | |
604 input_buffer_data_.push_front(buffer_data); | |
605 // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but | |
606 // that's too small for some pathological B-frame test videos. The cost of | |
607 // using too-high a value is low (192 bits per extra slot). | |
608 static const size_t kMaxInputBufferDataSize = 128; | |
609 // Pop from the back of the list, because that's the oldest data and the | |
610 // least likely to be useful in the future. | |
611 if (input_buffer_data_.size() > kMaxInputBufferDataSize) | |
612 input_buffer_data_.pop_back(); | |
613 } | |
614 | |
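 // Copies the recorded timestamp, width, and height for |bitstream_buffer_id| | |
 // into the output parameters; |size| is currently not filled in. | |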
615 void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id, | |
616 uint32_t* timestamp, | |
617 uint32_t* width, | |
618 uint32_t* height, | |
619 size_t* size) { | |
620 for (std::list<BufferData>::iterator it = input_buffer_data_.begin(); | |
621 it != input_buffer_data_.end(); | |
622 ++it) { | |
623 if (it->bitstream_buffer_id != bitstream_buffer_id) | |
624 continue; | |
625 *timestamp = it->timestamp; | |
626 *width = it->width; | |
627 *height = it->height; | |
628 return; | |
629 } | |
630 NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id; | |
631 } | |
632 | |
633 } // namespace content | |