1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/renderer/media/rtc_video_decoder.h" | |
6 | |
7 #include "base/bind.h" | |
8 #include "base/callback_helpers.h" | |
9 #include "base/logging.h" | |
10 #include "base/memory/ref_counted.h" | |
11 #include "base/message_loop_proxy.h" | |
12 #include "base/safe_numerics.h" | |
13 #include "base/task_runner_util.h" | |
14 #include "content/renderer/media/native_handle_impl.h" | |
15 #include "media/base/bind_to_loop.h" | |
16 #include "third_party/webrtc/system_wrappers/interface/ref_count.h" | |
17 | |
18 namespace content { | |
19 | |
20 // A shared memory segment and its allocated size. |shm| is not owned by | |
21 // this struct; users are responsible for deleting it. | |
22 struct RTCVideoDecoder::SHMBuffer { | |
23 SHMBuffer(base::SharedMemory* shm, size_t size); | |
24 ~SHMBuffer(); | |
25 base::SharedMemory* const shm; | |
26 const size_t size; | |
27 }; | |
28 | |
29 RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size) | |
30 : shm(shm), size(size) {} | |
31 | |
32 RTCVideoDecoder::SHMBuffer::~SHMBuffer() {} | |
33 | |
34 // Metadata of a bitstream buffer. | |
35 struct RTCVideoDecoder::BufferData { | |
36 BufferData(int32 bitstream_buffer_id, | |
37 uint32_t timestamp, | |
38 int width, | |
39 int height, | |
40 size_t size); | |
41 ~BufferData(); | |
42 int32 bitstream_buffer_id; | |
43 uint32_t timestamp; // in 90KHz | |
44 uint32_t width; | |
45 uint32_t height; | |
46 size_t size; // buffer size | |
47 }; | |
48 | |
49 RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id, | |
50 uint32_t timestamp, | |
51 int width, | |
52 int height, | |
53 size_t size) | |
54 : bitstream_buffer_id(bitstream_buffer_id), | |
55 timestamp(timestamp), | |
56 width(width), | |
57 height(height), | |
58 size(size) {} | |
59 | |
60 RTCVideoDecoder::BufferData::~BufferData() {} | |
61 | |
62 RTCVideoDecoder::RTCVideoDecoder( | |
63 const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) | |
64 : state_(UNINITIALIZED), | |
65 decode_complete_callback_(NULL), | |
66 weak_factory_(this), | |
67 factories_(factories), | |
68 vda_loop_proxy_(factories_->GetMessageLoop()), | |
69 decoder_texture_target_(0), | |
70 next_picture_buffer_id_(0), | |
71 next_bitstream_buffer_id_(0) { | |
72 // Initialize directly if |vda_loop_proxy_| is the renderer thread. | |
73 base::WaitableEvent compositor_loop_async_waiter(false, false); | |
74 if (vda_loop_proxy_->BelongsToCurrentThread()) { | |
75 Initialize(&compositor_loop_async_waiter); | |
76 return; | |
77 } | |
78 // Post the task if |vda_loop_proxy_| is the compositor thread. Waiting here | |
79 // is safe because the compositor thread will not be stopped until the | |
80 // renderer thread shuts down. | |
81 vda_loop_proxy_->PostTask( | |
82 FROM_HERE, | |
83 base::Bind(&RTCVideoDecoder::Initialize, | |
84 base::Unretained(this), | |
85 base::Unretained(&compositor_loop_async_waiter))); | |
Ami GONE FROM CHROMIUM
2013/06/19 18:28:58
base::Unretained is unnecessary here, no?
wuchengli
2013/06/20 07:27:04
Interesting. I thought it would have a compile error.
| |
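For context on the exchange above: base::Bind() rejects a raw |this| receiver for a class that is not reference counted, so some wrapper is required. A WeakPtr is not an option at this point because |weak_this_| is only created inside Initialize() on the VDA loop (line 448 below), and base::Unretained() is safe only because the constructor blocks on the waitable event until Initialize() has run, so |this| cannot go away in between. A minimal sketch of the three binding styles, assuming the 2013-era base APIs; |PendingWork|, |Post| and |DoWork| are illustrative names, not part of the patch.

#include "base/bind.h"
#include "base/location.h"
#include "base/memory/weak_ptr.h"
#include "base/message_loop_proxy.h"

class PendingWork {
 public:
  PendingWork() : weak_factory_(this) {}

  void Post(base::MessageLoopProxy* loop) {
    // Does not compile: PendingWork is not reference counted, so base::Bind()
    // refuses a raw |this| receiver.
    //   loop->PostTask(FROM_HERE, base::Bind(&PendingWork::DoWork, this));

    // Compiles; the caller must guarantee |this| outlives the posted task.
    // RTCVideoDecoder's constructor guarantees that by waiting on the event.
    loop->PostTask(FROM_HERE,
                   base::Bind(&PendingWork::DoWork, base::Unretained(this)));

    // Compiles and the task becomes a no-op once |this| is gone, but WeakPtrs
    // must be dereferenced on a single thread, which is why the real code
    // only mints |weak_this_| on the VDA loop inside Initialize().
    loop->PostTask(FROM_HERE,
                   base::Bind(&PendingWork::DoWork,
                              weak_factory_.GetWeakPtr()));
  }

  void DoWork() {}

 private:
  base::WeakPtrFactory<PendingWork> weak_factory_;
};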
86 compositor_loop_async_waiter.Wait(); | |
87 } | |
88 | |
89 RTCVideoDecoder::~RTCVideoDecoder() { | |
90 DVLOG(2) << "~RTCVideoDecoder"; | |
91 // Delete the VDA and remove |this| as a destruction observer if the vda thread is alive. | |
92 if (vda_loop_proxy_->BelongsToCurrentThread()) { | |
Ami GONE FROM CHROMIUM
2013/06/19 18:28:58
When can this be false?
More importantly, if it is
wuchengli
2013/06/20 07:27:04
It can be false when the vda thread stops before RTCVideoDecoder is destroyed.
| |
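The exchange above concerns the two teardown orders this class has to handle. Per the reply, BelongsToCurrentThread() is false when the VDA thread has already stopped before the decoder is destroyed; in that case WillDestroyCurrentMessageLoop() (line 425 below) has already run on that thread and destroyed the VDA, so the destructor's cleanup is limited to the shared memory and queued buffers. A rough sketch of the base::MessageLoop::DestructionObserver hook involved, assuming the 2013-era header layout; |LoopBoundCleanup| is an illustrative name, not from the patch.

#include "base/message_loop.h"

// An object that must tear down thread-affine state before its message loop
// goes away, mirroring how RTCVideoDecoder registers itself on the VDA loop.
class LoopBoundCleanup : public base::MessageLoop::DestructionObserver {
 public:
  void RegisterOnCurrentLoop() {
    base::MessageLoop::current()->AddDestructionObserver(this);
  }

  // Called on the loop's own thread just before the loop is destroyed.
  virtual void WillDestroyCurrentMessageLoop() {
    // Release anything that may only be touched on this thread (the VDA and
    // its textures, in RTCVideoDecoder's case).
  }

  void Unregister() {
    // Only valid while the loop is still alive, hence the
    // BelongsToCurrentThread() check in ~RTCVideoDecoder().
    base::MessageLoop::current()->RemoveDestructionObserver(this);
  }
};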
93 base::MessageLoop::current()->RemoveDestructionObserver(this); | |
94 DestroyVDA(); | |
95 } | |
96 | |
97 // Delete all shared memories. | |
98 for (size_t i = 0; i < available_shm_segments_.size(); ++i) { | |
99 available_shm_segments_[i]->shm->Close(); | |
100 delete available_shm_segments_[i]; | |
101 } | |
102 available_shm_segments_.clear(); | |
103 for (std::map<int32, SHMBuffer*>::iterator it = | |
104 bitstream_buffers_in_decoder_.begin(); | |
105 it != bitstream_buffers_in_decoder_.end(); | |
106 ++it) { | |
107 it->second->shm->Close(); | |
108 delete it->second; | |
109 } | |
110 bitstream_buffers_in_decoder_.clear(); | |
111 ClearBufferQueue(&buffers_to_be_decoded_); | |
112 ClearBufferQueue(&buffers_delayed_); | |
113 } | |
114 | |
115 scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create( | |
116 const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) { | |
117 scoped_ptr<RTCVideoDecoder> decoder(new RTCVideoDecoder(factories)); | |
118 if (!decoder->InitVideoDecodeAccelerator()) { | |
119 // VP8 is unsupported. Release the decoder. | |
120 factories->GetMessageLoop()->DeleteSoon(FROM_HERE, decoder.release()); | |
121 } | |
122 return decoder.Pass(); | |
123 } | |
124 | |
125 int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings, | |
126 int32_t /*numberOfCores*/) { | |
127 DVLOG(2) << "InitDecode"; | |
128 DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8); | |
129 if (codecSettings->codecSpecific.VP8.feedbackModeOn) { | |
130 LOG(ERROR) << "Feedback mode not supported"; | |
131 return WEBRTC_VIDEO_CODEC_ERROR; | |
132 } | |
133 | |
134 base::AutoLock auto_lock(lock_); | |
135 if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) { | |
136 LOG(ERROR) << "VDA is not initialized. state=" << state_; | |
137 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
138 } | |
139 return WEBRTC_VIDEO_CODEC_OK; | |
140 } | |
141 | |
142 int32_t RTCVideoDecoder::Decode( | |
143 const webrtc::EncodedImage& inputImage, | |
144 bool missingFrames, | |
145 const webrtc::RTPFragmentationHeader* /*fragmentation*/, | |
146 const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/, | |
147 int64_t /*renderTimeMs*/) { | |
148 DVLOG(3) << "Decode"; | |
149 | |
150 { | |
151 base::AutoLock auto_lock(lock_); | |
152 if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) { | |
153 LOG(ERROR) << "The decoder has not been initialized."; | |
154 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
155 } | |
156 if (state_ == DECODE_ERROR) { | |
157 LOG(ERROR) << "Decoding error occurred."; | |
158 return WEBRTC_VIDEO_CODEC_ERROR; | |
159 } | |
160 } | |
161 if (missingFrames || !inputImage._completeFrame) { | |
162 DLOG(ERROR) << "Missing or incomplete frames."; | |
163 // Unlike the SW decoder in libvpx, hw decoder cannot handle broken frames. | |
164 // Return an error to request a key frame. | |
165 return WEBRTC_VIDEO_CODEC_ERROR; | |
166 } | |
167 | |
168 if (inputImage._frameType == webrtc::kKeyFrame) | |
169 frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight); | |
170 | |
171 // Copy WebRTC buffer to SHM buffer and create buffer metadata. | |
172 SHMBuffer* shm_buffer = GetSHM(inputImage._length); | |
173 if (!shm_buffer) | |
174 return WEBRTC_VIDEO_CODEC_ERROR; | |
175 memcpy(shm_buffer->shm->memory(), inputImage._buffer, inputImage._length); | |
176 BufferData buffer_data(next_bitstream_buffer_id_, | |
177 inputImage._timeStamp, | |
178 frame_size_.width(), | |
179 frame_size_.height(), | |
180 inputImage._length); | |
181 // Mask against 30 bits, to avoid (undefined) wraparound on signed integer. | |
182 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF; | |
183 std::pair<SHMBuffer*, BufferData> buffer_pair = | |
184 std::make_pair(shm_buffer, buffer_data); | |
185 | |
186 // Store the buffer and the metadata to the queue. | |
187 base::AutoLock auto_lock(lock_); | |
188 if (state_ == RESETTING) { | |
189 // If VDA is resetting, save the buffer but do not request decode. | |
190 buffers_delayed_.push_back(buffer_pair); | |
191 } else { | |
192 buffers_to_be_decoded_.push_back(buffer_pair); | |
193 vda_loop_proxy_->PostTask( | |
194 FROM_HERE, | |
195 base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_)); | |
196 } | |
197 return WEBRTC_VIDEO_CODEC_OK; | |
198 } | |
199 | |
200 int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback( | |
201 webrtc::DecodedImageCallback* callback) { | |
202 base::AutoLock auto_lock(lock_); | |
203 decode_complete_callback_ = callback; | |
204 return WEBRTC_VIDEO_CODEC_OK; | |
205 } | |
206 | |
207 int32_t RTCVideoDecoder::Release() { | |
208 DVLOG(2) << "Release"; | |
209 // Do not destroy VDA because the decoder will be recycled by | |
210 // RTCVideoDecoderFactory. Just reset VDA. | |
211 return Reset(); | |
212 } | |
213 | |
214 int32_t RTCVideoDecoder::Reset() { | |
215 DVLOG(2) << "Reset"; | |
216 base::AutoLock auto_lock(lock_); | |
217 if (state_ == UNINITIALIZED) { | |
218 LOG(ERROR) << "Decoder not initialized."; | |
219 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
220 } else if (state_ == RESETTING) { | |
221 // If VDA is already resetting, empty the pending buffer queue. No need to | |
222 // request the reset again. | |
223 buffers_to_be_decoded_.insert(buffers_to_be_decoded_.end(), | |
224 buffers_delayed_.begin(), | |
225 buffers_delayed_.end()); | |
226 buffers_delayed_.clear(); | |
227 } else { | |
228 state_ = RESETTING; | |
229 vda_loop_proxy_->PostTask( | |
230 FROM_HERE, base::Bind(&RTCVideoDecoder::ResetInternal, weak_this_)); | |
231 } | |
232 return WEBRTC_VIDEO_CODEC_OK; | |
233 } | |
234 | |
235 void RTCVideoDecoder::NotifyInitializeDone() { | |
236 DVLOG(2) << "NotifyInitializeDone"; | |
237 NOTREACHED(); | |
238 } | |
239 | |
240 void RTCVideoDecoder::ProvidePictureBuffers(uint32 count, | |
241 const gfx::Size& size, | |
242 uint32 texture_target) { | |
243 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
244 DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target; | |
245 | |
246 if (!vda_) | |
247 return; | |
248 | |
249 std::vector<uint32> texture_ids; | |
250 decoder_texture_target_ = texture_target; | |
251 if (!factories_->CreateTextures( | |
252 count, size, &texture_ids, decoder_texture_target_)) { | |
253 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
254 return; | |
255 } | |
256 DCHECK_EQ(count, texture_ids.size()); | |
257 | |
258 std::vector<media::PictureBuffer> picture_buffers; | |
259 for (size_t i = 0; i < texture_ids.size(); ++i) { | |
260 picture_buffers.push_back( | |
261 media::PictureBuffer(next_picture_buffer_id_++, size, texture_ids[i])); | |
262 bool inserted = assigned_picture_buffers_.insert(std::make_pair( | |
263 picture_buffers.back().id(), picture_buffers.back())).second; | |
264 DCHECK(inserted); | |
265 } | |
266 vda_->AssignPictureBuffers(picture_buffers); | |
267 } | |
268 | |
269 void RTCVideoDecoder::DismissPictureBuffer(int32 id) { | |
270 DVLOG(3) << "DismissPictureBuffer. id=" << id; | |
271 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
272 | |
273 std::map<int32, media::PictureBuffer>::iterator it = | |
274 assigned_picture_buffers_.find(id); | |
275 if (it == assigned_picture_buffers_.end()) { | |
276 NOTREACHED() << "Missing picture buffer: " << id; | |
277 return; | |
278 } | |
279 | |
280 media::PictureBuffer buffer_to_dismiss = it->second; | |
281 assigned_picture_buffers_.erase(it); | |
282 | |
283 std::set<int32>::iterator at_display_it = | |
284 picture_buffers_at_display_.find(id); | |
285 | |
286 if (at_display_it == picture_buffers_at_display_.end()) { | |
287 // We can delete the texture immediately as it's not being displayed. | |
288 factories_->DeleteTexture(buffer_to_dismiss.texture_id()); | |
289 } else { | |
290 // Texture in display. Postpone deletion until after it's returned to us. | |
291 bool inserted = dismissed_picture_buffers_ | |
292 .insert(std::make_pair(id, buffer_to_dismiss)).second; | |
293 DCHECK(inserted); | |
294 } | |
295 } | |
296 | |
297 void RTCVideoDecoder::PictureReady(const media::Picture& picture) { | |
298 DVLOG(3) << "PictureReady"; | |
299 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
300 | |
301 std::map<int32, media::PictureBuffer>::iterator it = | |
302 assigned_picture_buffers_.find(picture.picture_buffer_id()); | |
303 if (it == assigned_picture_buffers_.end()) { | |
304 NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id(); | |
305 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
306 return; | |
307 } | |
308 const media::PictureBuffer& pb = it->second; | |
309 | |
310 // Create a media::VideoFrame. | |
311 uint32_t timestamp = 0; | |
312 uint32_t width = 0, height = 0; | |
313 size_t size = 0; | |
314 GetBufferData( | |
315 picture.bitstream_buffer_id(), ×tamp, &width, &height, &size); | |
316 gfx::Rect visible_rect(width, height); | |
317 gfx::Size natural_size(width, height); | |
318 DCHECK(decoder_texture_target_); | |
319 // Convert timestamp from 90KHz to ms. | |
320 base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue( | |
321 base::checked_numeric_cast<uint64_t>(timestamp) * 1000 / 90); | |
322 scoped_refptr<media::VideoFrame> frame(media::VideoFrame::WrapNativeTexture( | |
323 pb.texture_id(), | |
324 decoder_texture_target_, | |
325 pb.size(), | |
326 visible_rect, | |
327 natural_size, | |
328 timestamp_ms, | |
329 base::Bind(&media::GpuVideoDecoder::Factories::ReadPixels, | |
330 factories_, | |
331 pb.texture_id(), | |
332 decoder_texture_target_, | |
333 natural_size), | |
334 media::BindToCurrentLoop(base::Bind(&RTCVideoDecoder::ReusePictureBuffer, | |
335 weak_this_, | |
336 picture.picture_buffer_id())))); | |
337 bool inserted = | |
338 picture_buffers_at_display_.insert(picture.picture_buffer_id()).second; | |
339 DCHECK(inserted); | |
340 | |
341 // Create a webrtc::I420VideoFrame. | |
342 webrtc::I420VideoFrame decoded_image; | |
343 // TODO(wuchengli): remove the malloc. | |
344 decoded_image.CreateEmptyFrame(width, height, width, height / 2, width / 2); | |
345 webrtc::RefCountImpl<NativeHandleImpl>* handle = | |
346 new webrtc::RefCountImpl<NativeHandleImpl>(); | |
347 handle->SetHandle(frame.get()); | |
348 decoded_image.set_native_handle(handle); | |
349 decoded_image.set_timestamp(timestamp); | |
350 | |
351 // Send to decode callback. | |
352 webrtc::DecodedImageCallback* callback; | |
353 { | |
354 base::AutoLock auto_lock(lock_); | |
355 callback = decode_complete_callback_; | |
356 } | |
357 DCHECK(callback != NULL); | |
358 callback->Decoded(decoded_image); | |
359 } | |
360 | |
361 void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) { | |
362 DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id; | |
363 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
364 | |
365 std::map<int32, SHMBuffer*>::iterator it = | |
366 bitstream_buffers_in_decoder_.find(id); | |
367 if (it == bitstream_buffers_in_decoder_.end()) { | |
368 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
369 NOTREACHED() << "Missing bitstream buffer: " << id; | |
370 return; | |
371 } | |
372 | |
373 PutSHM(it->second); | |
374 bitstream_buffers_in_decoder_.erase(it); | |
375 | |
376 RequestBufferDecode(); | |
377 } | |
378 | |
379 void RTCVideoDecoder::NotifyFlushDone() { | |
380 DVLOG(3) << "NotifyFlushDone"; | |
381 NOTREACHED() << "Unexpected flush done notification."; | |
382 } | |
383 | |
384 void RTCVideoDecoder::NotifyResetDone() { | |
385 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
386 DVLOG(3) << "NotifyResetDone"; | |
387 | |
388 if (!vda_) | |
389 return; | |
390 | |
391 input_buffer_data_.clear(); | |
392 int num_buffers = 0; | |
393 { | |
394 base::AutoLock auto_lock(lock_); | |
395 // Clear and recycle the old buffers. | |
396 for (std::deque<std::pair<SHMBuffer*, BufferData> >::const_iterator it = | |
397 buffers_to_be_decoded_.begin(); | |
398 it != buffers_to_be_decoded_.end(); | |
399 it++) { | |
400 PutSHM(it->first); | |
401 } | |
402 buffers_to_be_decoded_.clear(); | |
403 | |
404 std::swap(buffers_to_be_decoded_, buffers_delayed_); | |
405 num_buffers = buffers_to_be_decoded_.size(); | |
406 state_ = INITIALIZED; | |
407 } | |
408 // Send the pending buffers for decoding. | |
409 for (int i = 0; i < num_buffers; i++) | |
410 RequestBufferDecode(); | |
411 } | |
412 | |
413 void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) { | |
414 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
415 if (!vda_) | |
416 return; | |
417 | |
418 DLOG(ERROR) << "VDA Error:" << error; | |
419 DestroyVDA(); | |
420 | |
421 base::AutoLock auto_lock(lock_); | |
422 state_ = DECODE_ERROR; | |
423 } | |
424 | |
425 void RTCVideoDecoder::WillDestroyCurrentMessageLoop() { | |
426 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
427 factories_->Abort(); | |
428 weak_this_.reset(); | |
Ami GONE FROM CHROMIUM
2013/06/19 18:28:58
Did you mean to weak_factory_.InvalidateWeakPtrs() instead?
wuchengli
2013/06/20 07:27:04
Done. weak_this_.reset() also invalidate the weak
| |
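The distinction behind the comment above, sketched with illustrative names (|Widget|, |Demo| and |Ping| are not part of the patch): base::WeakPtr<T>::reset() only nulls the single WeakPtr it is called on, while base::WeakPtrFactory<T>::InvalidateWeakPtrs() invalidates every WeakPtr the factory has handed out, including copies already bound into pending callbacks.

#include "base/bind.h"
#include "base/callback.h"
#include "base/memory/weak_ptr.h"

class Widget {
 public:
  Widget() : weak_factory_(this) {}

  void Demo() {
    base::WeakPtr<Widget> weak = weak_factory_.GetWeakPtr();
    base::Closure cb = base::Bind(&Widget::Ping, weak);

    weak.reset();  // Only |weak| itself becomes null; the copy bound into
    cb.Run();      // |cb| is untouched, so Ping() still runs.

    weak_factory_.InvalidateWeakPtrs();
    cb.Run();      // Now a no-op: every outstanding WeakPtr is invalid.
  }

  void Ping() {}

 private:
  base::WeakPtrFactory<Widget> weak_factory_;
};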
429 DestroyVDA(); | |
430 } | |
431 | |
432 bool RTCVideoDecoder::InitVideoDecodeAccelerator() { | |
433 vda_.reset( | |
434 factories_->CreateVideoDecodeAccelerator(media::VP8PROFILE_MAIN, this)); | |
435 | |
436 // vda can be NULL if the codec type is not supported. | |
437 if (vda_ != NULL) { | |
438 base::AutoLock auto_lock(lock_); | |
Ami GONE FROM CHROMIUM
2013/06/19 18:28:58
If you inlined this into Create it would be more obvious.
wuchengli
2013/06/20 07:27:04
I inlined this into Create. Does it look OK?
| |
439 state_ = INITIALIZED; | |
440 return true; | |
Ami GONE FROM CHROMIUM
2013/06/19 18:28:58
return value is unnecessary since state_ tells whether initialization succeeded.
wuchengli
2013/06/20 07:27:04
Done.
| |
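One possible shape of the change the two comments above ask for, with InitVideoDecodeAccelerator() folded into Create() and |state_| signaling success instead of a bool return value. This is an assumption about the follow-up patch set, which is not shown here.

scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
    const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) {
  scoped_ptr<RTCVideoDecoder> decoder(new RTCVideoDecoder(factories));
  decoder->vda_.reset(factories->CreateVideoDecodeAccelerator(
      media::VP8PROFILE_MAIN, decoder.get()));
  if (decoder->vda_) {
    base::AutoLock auto_lock(decoder->lock_);
    decoder->state_ = INITIALIZED;
  } else {
    // VP8 is unsupported. Release the decoder on the VDA loop.
    factories->GetMessageLoop()->DeleteSoon(FROM_HERE, decoder.release());
  }
  return decoder.Pass();
}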
441 } | |
442 return false; | |
443 } | |
444 | |
445 void RTCVideoDecoder::Initialize(base::WaitableEvent* waiter) { | |
446 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
447 base::MessageLoop::current()->AddDestructionObserver(this); | |
448 weak_this_ = weak_factory_.GetWeakPtr(); | |
449 waiter->Signal(); | |
450 } | |
451 | |
452 void RTCVideoDecoder::RequestBufferDecode() { | |
453 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
454 if (!CanMoreDecodeWorkBeDone() || !vda_) | |
455 return; | |
456 | |
457 // Get a buffer and data from the queue. | |
458 std::pair<SHMBuffer*, BufferData>* buffer_pair; | |
459 { | |
460 base::AutoLock auto_lock(lock_); | |
461 if (buffers_to_be_decoded_.size() == 0 || state_ == RESETTING) | |
462 return; | |
463 buffer_pair = &buffers_to_be_decoded_.front(); | |
464 buffers_to_be_decoded_.pop_front(); | |
465 } | |
466 SHMBuffer* shm_buffer = buffer_pair->first; | |
467 BufferData buffer_data = buffer_pair->second; | |
468 | |
469 // Create a BitstreamBuffer and send to VDA to decode. | |
470 media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id, | |
471 shm_buffer->shm->handle(), | |
472 buffer_data.size); | |
473 bool inserted = bitstream_buffers_in_decoder_ | |
474 .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second; | |
475 DCHECK(inserted); | |
476 RecordBufferData(buffer_data); | |
477 vda_->Decode(bitstream_buffer); | |
478 } | |
479 | |
480 // Maximum number of concurrent VDA::Decode() operations RVD will maintain. | |
481 // Higher values allow better pipelining in the GPU, but also require more | |
482 // resources. | |
483 enum { | |
484 kMaxInFlightDecodes = 8 | |
485 }; | |
486 | |
487 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() { | |
488 return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes; | |
489 } | |
490 | |
491 void RTCVideoDecoder::ResetInternal() { | |
492 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
493 DVLOG(2) << "ResetInternal"; | |
494 if (vda_) | |
495 vda_->Reset(); | |
496 } | |
497 | |
498 void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id) { | |
499 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
500 DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id; | |
501 | |
502 if (!vda_) | |
503 return; | |
504 | |
505 CHECK(!picture_buffers_at_display_.empty()); | |
506 | |
507 size_t num_erased = picture_buffers_at_display_.erase(picture_buffer_id); | |
508 DCHECK(num_erased); | |
509 | |
510 std::map<int32, media::PictureBuffer>::iterator it = | |
511 assigned_picture_buffers_.find(picture_buffer_id); | |
512 | |
513 if (it == assigned_picture_buffers_.end()) { | |
514 // This picture was dismissed while in display, so we postponed deletion. | |
515 it = dismissed_picture_buffers_.find(picture_buffer_id); | |
516 DCHECK(it != dismissed_picture_buffers_.end()); | |
517 factories_->DeleteTexture(it->second.texture_id()); | |
518 dismissed_picture_buffers_.erase(it); | |
519 return; | |
520 } | |
521 | |
522 vda_->ReusePictureBuffer(picture_buffer_id); | |
523 } | |
524 | |
525 void RTCVideoDecoder::DestroyTextures() { | |
526 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
527 std::map<int32, media::PictureBuffer>::iterator it; | |
528 | |
529 for (it = assigned_picture_buffers_.begin(); | |
530 it != assigned_picture_buffers_.end(); | |
531 ++it) { | |
532 factories_->DeleteTexture(it->second.texture_id()); | |
533 } | |
534 assigned_picture_buffers_.clear(); | |
535 | |
536 for (it = dismissed_picture_buffers_.begin(); | |
537 it != dismissed_picture_buffers_.end(); | |
538 ++it) { | |
539 factories_->DeleteTexture(it->second.texture_id()); | |
540 } | |
541 dismissed_picture_buffers_.clear(); | |
542 } | |
543 | |
544 void RTCVideoDecoder::DestroyVDA() { | |
545 DVLOG(2) << "DestroyVDA"; | |
546 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
547 if (vda_) | |
548 vda_.release()->Destroy(); | |
549 DestroyTextures(); | |
550 } | |
551 | |
552 void RTCVideoDecoder::ClearBufferQueue( | |
553 std::deque<std::pair<SHMBuffer*, BufferData> >* queue) { | |
554 for (std::deque<std::pair<SHMBuffer*, BufferData> >::iterator it = | |
555 queue->begin(); | |
556 it != queue->end(); | |
557 ++it) { | |
558 it->first->shm->Close(); | |
559 delete it->first; | |
560 } | |
561 queue->clear(); | |
562 } | |
563 | |
564 // Size of shared-memory segments we allocate. Since we reuse them we let them | |
565 // be on the beefy side. | |
566 static const size_t kSharedMemorySegmentBytes = 100 << 10; | |
567 | |
568 RTCVideoDecoder::SHMBuffer* RTCVideoDecoder::GetSHM(size_t min_size) { | |
569 { | |
570 // Reuse a SHM if possible. | |
571 base::AutoLock auto_lock(lock_); | |
572 if (!available_shm_segments_.empty() && | |
573 available_shm_segments_.back()->size >= min_size) { | |
574 SHMBuffer* ret = available_shm_segments_.back(); | |
575 available_shm_segments_.pop_back(); | |
576 return ret; | |
577 } | |
578 } | |
579 // Create a new shared memory segment. This is done on the main thread. | |
580 size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes); | |
581 base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate); | |
582 if (!shm) | |
583 return NULL; | |
584 return new SHMBuffer(shm, size_to_allocate); | |
585 } | |
586 | |
587 void RTCVideoDecoder::PutSHM(SHMBuffer* shm_buffer) { | |
588 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
589 base::AutoLock auto_lock(lock_); | |
590 available_shm_segments_.push_back(shm_buffer); | |
591 } | |
592 | |
593 void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) { | |
594 input_buffer_data_.push_front(buffer_data); | |
595 // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but | |
596 // that's too small for some pathological B-frame test videos. The cost of | |
597 // using too-high a value is low (192 bits per extra slot). | |
598 static const size_t kMaxInputBufferDataSize = 128; | |
599 // Pop from the back of the list, because that's the oldest and least likely | |
600 // to be useful data in the future. | |
601 if (input_buffer_data_.size() > kMaxInputBufferDataSize) | |
602 input_buffer_data_.pop_back(); | |
603 } | |
604 | |
605 void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id, | |
606 uint32_t* timestamp, | |
607 uint32_t* width, | |
608 uint32_t* height, | |
609 size_t* size) { | |
610 for (std::list<BufferData>::iterator it = input_buffer_data_.begin(); | |
611 it != input_buffer_data_.end(); | |
612 ++it) { | |
613 if (it->bitstream_buffer_id != bitstream_buffer_id) | |
614 continue; | |
615 *timestamp = it->timestamp; | |
616 *width = it->width; | |
617 *height = it->height; | |
618 return; | |
619 } | |
620 NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id; | |
621 } | |
622 | |
623 } // namespace content | |