1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/renderer/media/rtc_video_decoder.h" | |
6 | |
7 #include "base/bind.h" | |
8 #include "base/logging.h" | |
9 #include "base/memory/ref_counted.h" | |
10 #include "base/message_loop_proxy.h" | |
11 #include "base/safe_numerics.h" | |
12 #include "base/task_runner_util.h" | |
13 #include "content/child/child_thread.h" | |
14 #include "content/renderer/media/native_handle_impl.h" | |
15 #include "media/base/bind_to_loop.h" | |
16 #include "third_party/webrtc/system_wrappers/interface/ref_count.h" | |
17 | |
18 namespace content { | |
19 | |
20 // A shared memory segment and its allocated size. |shm| is not owned by this | |
21 // struct; users are responsible for closing and deleting it. | |
22 struct RTCVideoDecoder::SHMBuffer { | |
23 SHMBuffer(base::SharedMemory* shm, size_t size); | |
24 ~SHMBuffer(); | |
25 base::SharedMemory* const shm; | |
26 const size_t size; | |
27 }; | |
28 | |
29 RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size) | |
30 : shm(shm), size(size) {} | |
31 | |
32 RTCVideoDecoder::SHMBuffer::~SHMBuffer() {} | |
33 | |
34 // Metadata of a bitstream buffer. | |
35 struct RTCVideoDecoder::BufferData { | |
36 BufferData(int32 bitstream_buffer_id, | |
37 uint32_t timestamp, | |
38 int width, | |
39 int height, | |
40 size_t sisze); | |
Ami GONE FROM CHROMIUM (2013/06/26 00:11:58): typo: sisze
wuchengli (2013/06/28 15:08:44): Done.
41 ~BufferData(); | |
42 int32 bitstream_buffer_id; | |
43 uint32_t timestamp; // in 90KHz | |
44 uint32_t width; | |
45 uint32_t height; | |
46 size_t size; // buffer size | |
47 }; | |
48 | |
49 RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id, | |
50 uint32_t timestamp, | |
51 int width, | |
52 int height, | |
53 size_t size) | |
54 : bitstream_buffer_id(bitstream_buffer_id), | |
55 timestamp(timestamp), | |
56 width(width), | |
57 height(height), | |
58 size(size) {} | |
59 | |
60 RTCVideoDecoder::BufferData::~BufferData() {} | |
61 | |
62 RTCVideoDecoder::RTCVideoDecoder( | |
63 const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) | |
64 : state_(UNINITIALIZED), | |
65 decode_complete_callback_(NULL), | |
66 weak_factory_(this), | |
67 factories_(factories), | |
68 vda_loop_proxy_(factories_->GetMessageLoop()), | |
69 decoder_texture_target_(0), | |
70 next_picture_buffer_id_(0), | |
71 next_bitstream_buffer_id_(0), | |
72 reset_bitstream_buffer_id_(0) { | |
73 // Initialize directly if |vda_loop_proxy_| is the renderer thread. | |
74 base::WaitableEvent compositor_loop_async_waiter(false, false); | |
75 if (vda_loop_proxy_->BelongsToCurrentThread()) { | |
76 Initialize(&compositor_loop_async_waiter); | |
77 return; | |
78 } | |
79 // Post the task if |vda_loop_proxy_| is the compositor thread. Waiting here | |
80 // is safe because the compositor thread will not be stopped until the | |
81 // renderer thread shuts down. | |
82 vda_loop_proxy_->PostTask(FROM_HERE, | |
83 base::Bind(&RTCVideoDecoder::Initialize, | |
84 base::Unretained(this), | |
85 &compositor_loop_async_waiter)); | |
86 compositor_loop_async_waiter.Wait(); | |
87 } | |
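The constructor above either runs Initialize() inline or posts it to |vda_loop_proxy_| and blocks on a WaitableEvent. A standalone sketch of that post-and-wait handshake using only standard C++ (PostAndWait and its lambda are illustrative names, not Chromium API):

#include <functional>
#include <future>
#include <iostream>

// Runs |work| on another thread and blocks until it signals completion,
// analogous to posting Initialize() and waiting on
// |compositor_loop_async_waiter| in the constructor above.
void PostAndWait(const std::function<void()>& work) {
  std::promise<void> done;                 // plays the role of WaitableEvent
  std::future<void> signaled = done.get_future();
  std::future<void> task = std::async(std::launch::async, [&] {
    work();                                // e.g. Initialize()
    done.set_value();                      // waiter->Signal()
  });
  signaled.wait();                         // compositor_loop_async_waiter.Wait()
  task.get();
}

int main() {
  PostAndWait([] { std::cout << "initialized on another thread\n"; });
  return 0;
}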
88 | |
89 RTCVideoDecoder::~RTCVideoDecoder() { | |
90 DVLOG(2) << "~RTCVideoDecoder"; | |
91 // Delete the VDA and remove |this| as a destruction observer if the VDA thread is alive. | |
92 if (vda_loop_proxy_->BelongsToCurrentThread()) { | |
93 base::MessageLoop::current()->RemoveDestructionObserver(this); | |
94 DestroyVDA(); | |
95 } else { | |
96 // VDA should have been destroyed in WillDestroyCurrentMessageLoop. | |
97 DCHECK(!vda_); | |
98 } | |
99 | |
100 // Delete all shared memories. | |
101 for (size_t i = 0; i < available_shm_segments_.size(); ++i) { | |
102 available_shm_segments_[i]->shm->Close(); | |
103 delete available_shm_segments_[i]; | |
104 } | |
105 available_shm_segments_.clear(); | |
106 for (std::map<int32, SHMBuffer*>::iterator it = | |
107 bitstream_buffers_in_decoder_.begin(); | |
108 it != bitstream_buffers_in_decoder_.end(); | |
109 ++it) { | |
110 it->second->shm->Close(); | |
111 delete it->second; | |
112 } | |
113 bitstream_buffers_in_decoder_.clear(); | |
114 for (std::deque<std::pair<SHMBuffer*, BufferData> >::iterator it = | |
115 buffers_to_be_decoded_.begin(); | |
116 it != buffers_to_be_decoded_.end(); | |
117 ++it) { | |
118 it->first->shm->Close(); | |
119 delete it->first; | |
120 } | |
121 buffers_to_be_decoded_.clear(); | |
122 | |
123 // Delete WebRTC input buffers. | |
124 for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it = | |
125 webrtc_buffers_.begin(); | |
126 it != webrtc_buffers_.end(); | |
127 ++it) { | |
128 delete[] it->first._buffer; | |
129 } | |
130 } | |
131 | |
132 scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create( | |
133 const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) { | |
134 scoped_ptr<RTCVideoDecoder> decoder(new RTCVideoDecoder(factories)); | |
135 decoder->vda_.reset(factories->CreateVideoDecodeAccelerator( | |
136 media::VP8PROFILE_MAIN, decoder.get())); | |
137 // vda can be NULL if VP8 is not supported. | |
138 if (decoder->vda_ != NULL) { | |
139 decoder->state_ = INITIALIZED; | |
140 } else { | |
141 factories->GetMessageLoop()->DeleteSoon(FROM_HERE, decoder.release()); | |
142 } | |
143 return decoder.Pass(); | |
144 } | |
145 | |
146 int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings, | |
147 int32_t /*numberOfCores*/) { | |
148 DVLOG(2) << "InitDecode"; | |
149 DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8); | |
150 if (codecSettings->codecSpecific.VP8.feedbackModeOn) { | |
151 LOG(ERROR) << "Feedback mode not supported"; | |
152 return WEBRTC_VIDEO_CODEC_ERROR; | |
153 } | |
154 | |
155 base::AutoLock auto_lock(lock_); | |
156 if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) { | |
157 LOG(ERROR) << "VDA is not initialized. state=" << state_; | |
158 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
159 } | |
160 return WEBRTC_VIDEO_CODEC_OK; | |
161 } | |
162 | |
163 int32_t RTCVideoDecoder::Decode( | |
164 const webrtc::EncodedImage& inputImage, | |
165 bool missingFrames, | |
166 const webrtc::RTPFragmentationHeader* /*fragmentation*/, | |
167 const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/, | |
168 int64_t /*renderTimeMs*/) { | |
169 DVLOG(3) << "Decode"; | |
170 | |
171 int bitstream_buffer_id; | |
172 { | |
173 base::AutoLock auto_lock(lock_); | |
174 if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) { | |
175 LOG(ERROR) << "The decoder has not been initialized."; | |
176 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
177 } | |
178 if (state_ == DECODE_ERROR) { | |
179 LOG(ERROR) << "Decoding error occurred."; | |
180 return WEBRTC_VIDEO_CODEC_ERROR; | |
181 } | |
182 bitstream_buffer_id = next_bitstream_buffer_id_; | |
183 // Mask against 30 bits, to avoid (undefined) wraparound on signed integer. | |
184 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF; | |
185 } | |
186 if (missingFrames || !inputImage._completeFrame) { | |
187 DLOG(ERROR) << "Missing or incomplete frames."; | |
188 // Unlike the SW decoder in libvpx, the HW decoder cannot handle broken frames. | |
189 // Return an error to request a key frame. | |
190 return WEBRTC_VIDEO_CODEC_ERROR; | |
191 } | |
192 if (inputImage._frameType == webrtc::kKeyFrame) | |
193 frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight); | |
194 | |
195 // Create buffer metadata. | |
196 BufferData buffer_data(bitstream_buffer_id, | |
197 inputImage._timeStamp, | |
198 frame_size_.width(), | |
199 frame_size_.height(), | |
200 inputImage._length); | |
201 | |
202 SendPendingBuffersForDecode(); | |
Ami GONE FROM CHROMIUM (2013/06/26 00:11:58): This is a strange place to place this call. What'…
wuchengli (2013/06/28 15:08:44): The buffers should be sent to VDA in order. If any…
203 | |
204 // If the shared memory is available and there are no pending buffers, send | |
205 // the buffer for decode. If not, save the buffer in the queue for decode | |
206 // later. | |
207 SHMBuffer* shm_buffer = NULL; | |
208 if (webrtc_buffers_.size() == 0) | |
209 shm_buffer = GetSHM(inputImage._length); | |
210 if (shm_buffer != NULL) | |
211 SendBufferForDecode(inputImage, shm_buffer, buffer_data); | |
212 else | |
213 SaveToPendingBuffers(inputImage, buffer_data); | |
214 | |
215 return WEBRTC_VIDEO_CODEC_OK; | |
216 } | |
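The review thread above on the SendPendingBuffersForDecode() call is about ordering: buffers must reach the VDA in arrival order, so a frame may bypass the pending queue only when that queue is already empty and shared memory is available. A toy sketch of that discipline (Submit, pending_frames and shm_available are illustrative names, not the CL's API):

#include <deque>
#include <iostream>
#include <string>

std::deque<std::string> pending_frames;

// A new frame takes the fast path only when no older frame is still queued;
// otherwise it lines up behind them so decode order matches arrival order.
void Submit(const std::string& frame, bool shm_available) {
  if (pending_frames.empty() && shm_available) {
    std::cout << "decode now: " << frame << "\n";   // fast path
  } else {
    pending_frames.push_back(frame);                // decode later, in order
  }
}

int main() {
  Submit("frame0", true);    // decoded immediately
  Submit("frame1", false);   // queued: no shared memory yet
  Submit("frame2", true);    // must queue behind frame1 despite free memory
  return 0;
}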
217 | |
218 int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback( | |
219 webrtc::DecodedImageCallback* callback) { | |
220 DVLOG(2) << "RegisterDecodeCompleteCallback"; | |
221 base::AutoLock auto_lock(lock_); | |
222 decode_complete_callback_ = callback; | |
223 return WEBRTC_VIDEO_CODEC_OK; | |
224 } | |
225 | |
226 int32_t RTCVideoDecoder::Release() { | |
227 DVLOG(2) << "Release"; | |
228 // Do not destroy VDA because the decoder will be recycled by | |
229 // RTCVideoDecoderFactory. Just reset VDA. | |
230 return Reset(); | |
231 } | |
232 | |
233 int32_t RTCVideoDecoder::Reset() { | |
234 DVLOG(2) << "Reset"; | |
235 base::AutoLock auto_lock(lock_); | |
236 if (state_ == UNINITIALIZED) { | |
237 LOG(ERROR) << "Decoder not initialized."; | |
238 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
239 } | |
240 reset_bitstream_buffer_id_ = next_bitstream_buffer_id_; | |
241 // If VDA is already resetting, no need to request the reset again. | |
242 if (state_ != RESETTING) { | |
243 state_ = RESETTING; | |
244 vda_loop_proxy_->PostTask( | |
245 FROM_HERE, base::Bind(&RTCVideoDecoder::ResetInternal, weak_this_)); | |
246 } | |
247 return WEBRTC_VIDEO_CODEC_OK; | |
248 } | |
249 | |
250 void RTCVideoDecoder::NotifyInitializeDone() { | |
251 DVLOG(2) << "NotifyInitializeDone"; | |
252 NOTREACHED(); | |
253 } | |
254 | |
255 void RTCVideoDecoder::ProvidePictureBuffers(uint32 count, | |
256 const gfx::Size& size, | |
257 uint32 texture_target) { | |
258 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
259 DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target; | |
260 | |
261 if (!vda_) | |
262 return; | |
263 | |
264 std::vector<uint32> texture_ids; | |
265 decoder_texture_target_ = texture_target; | |
266 if (!factories_->CreateTextures( | |
267 count, size, &texture_ids, decoder_texture_target_)) { | |
268 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
269 return; | |
270 } | |
271 DCHECK_EQ(count, texture_ids.size()); | |
272 | |
273 std::vector<media::PictureBuffer> picture_buffers; | |
274 for (size_t i = 0; i < texture_ids.size(); ++i) { | |
275 picture_buffers.push_back( | |
276 media::PictureBuffer(next_picture_buffer_id_++, size, texture_ids[i])); | |
277 bool inserted = assigned_picture_buffers_.insert(std::make_pair( | |
278 picture_buffers.back().id(), picture_buffers.back())).second; | |
279 DCHECK(inserted); | |
280 } | |
281 vda_->AssignPictureBuffers(picture_buffers); | |
282 } | |
283 | |
284 void RTCVideoDecoder::DismissPictureBuffer(int32 id) { | |
285 DVLOG(3) << "DismissPictureBuffer. id=" << id; | |
286 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
287 | |
288 std::map<int32, media::PictureBuffer>::iterator it = | |
289 assigned_picture_buffers_.find(id); | |
290 if (it == assigned_picture_buffers_.end()) { | |
291 NOTREACHED() << "Missing picture buffer: " << id; | |
292 return; | |
293 } | |
294 | |
295 media::PictureBuffer buffer_to_dismiss = it->second; | |
296 assigned_picture_buffers_.erase(it); | |
297 | |
298 std::set<int32>::iterator at_display_it = | |
299 picture_buffers_at_display_.find(id); | |
300 | |
301 if (at_display_it == picture_buffers_at_display_.end()) { | |
302 // We can delete the texture immediately as it's not being displayed. | |
303 factories_->DeleteTexture(buffer_to_dismiss.texture_id()); | |
304 } else { | |
305 // Texture in display. Postpone deletion until after it's returned to us. | |
306 bool inserted = dismissed_picture_buffers_ | |
307 .insert(std::make_pair(id, buffer_to_dismiss)).second; | |
308 DCHECK(inserted); | |
309 } | |
310 } | |
311 | |
312 void RTCVideoDecoder::PictureReady(const media::Picture& picture) { | |
313 DVLOG(3) << "PictureReady"; | |
314 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
315 | |
316 std::map<int32, media::PictureBuffer>::iterator it = | |
317 assigned_picture_buffers_.find(picture.picture_buffer_id()); | |
318 if (it == assigned_picture_buffers_.end()) { | |
319 NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id(); | |
320 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
321 return; | |
322 } | |
323 const media::PictureBuffer& pb = it->second; | |
324 | |
325 // Create a media::VideoFrame. | |
326 uint32_t timestamp = 0, width = 0, height = 0; | |
327 size_t size = 0; | |
328 GetBufferData( | |
329 picture.bitstream_buffer_id(), ×tamp, &width, &height, &size); | |
330 scoped_refptr<media::VideoFrame> frame = | |
331 CreateVideoFrame(picture, pb, timestamp, width, height, size); | |
332 bool inserted = | |
333 picture_buffers_at_display_.insert(picture.picture_buffer_id()).second; | |
334 DCHECK(inserted); | |
335 { | |
336 // WebRTC expects no frame callback after Release. Drop the frame if VDA is | |
Ami GONE FROM CHROMIUM (2013/06/26 00:11:58): Given the frame callback is run on a thread other…
wuchengli (2013/06/28 15:08:44): You are right. I need to hold the lock while calli…
337 // resetting. | |
338 base::AutoLock auto_lock(lock_); | |
339 if (state_ == RESETTING) | |
340 return; | |
341 } | |
342 | |
343 // Create a webrtc::I420VideoFrame. | |
344 webrtc::I420VideoFrame decoded_image; | |
345 // TODO(wuchengli): remove the malloc. | |
346 decoded_image.CreateEmptyFrame(width, height, width, height / 2, width / 2); | |
347 webrtc::RefCountImpl<NativeHandleImpl>* handle = | |
348 new webrtc::RefCountImpl<NativeHandleImpl>(); | |
349 handle->SetHandle(frame.get()); | |
350 decoded_image.set_native_handle(handle); | |
351 decoded_image.set_timestamp(timestamp); | |
352 | |
353 // Send to decode callback. | |
354 webrtc::DecodedImageCallback* callback; | |
355 { | |
356 base::AutoLock auto_lock(lock_); | |
357 callback = decode_complete_callback_; | |
358 } | |
359 DCHECK(callback != NULL); | |
360 callback->Decoded(decoded_image); | |
361 } | |
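The review thread inside PictureReady() notes that the Decoded() callback runs on a different thread than Release()/Reset(), and the (truncated) reply says the lock needs to be held while calling it. A minimal sketch of that shape, with std::mutex standing in for |lock_| (DecoderLike and OnPictureReady are hypothetical, not the CL's final code):

#include <iostream>
#include <mutex>

struct DecoderLike {
  std::mutex lock_;
  bool resetting_ = false;
  void (*decoded_callback_)(int) = nullptr;

  // Check the reset state and invoke the callback under the same lock, so a
  // concurrent Release()/Reset() cannot slip in between the check and the call.
  void OnPictureReady(int frame_id) {
    std::lock_guard<std::mutex> auto_lock(lock_);
    if (resetting_ || decoded_callback_ == nullptr)
      return;                      // WebRTC expects no callback after Release().
    decoded_callback_(frame_id);   // still holding |lock_|, as the reply suggests
  }
};

int main() {
  DecoderLike decoder;
  decoder.decoded_callback_ = [](int id) { std::cout << "decoded " << id << "\n"; };
  decoder.OnPictureReady(1);       // delivered
  decoder.resetting_ = true;
  decoder.OnPictureReady(2);       // dropped: decoder is resetting
  return 0;
}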
362 | |
363 scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame( | |
364 const media::Picture& picture, | |
365 const media::PictureBuffer& pb, | |
366 uint32_t timestamp, | |
367 uint32_t width, | |
368 uint32_t height, | |
369 size_t size) { | |
370 gfx::Rect visible_rect(width, height); | |
371 gfx::Size natural_size(width, height); | |
372 DCHECK(decoder_texture_target_); | |
373 // Convert timestamp from 90KHz to ms. | |
374 base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue( | |
375 base::checked_numeric_cast<uint64_t>(timestamp) * 1000 / 90); | |
376 return media::VideoFrame::WrapNativeTexture( | |
377 pb.texture_id(), | |
378 decoder_texture_target_, | |
379 pb.size(), | |
380 visible_rect, | |
381 natural_size, | |
382 timestamp_ms, | |
383 base::Bind(&media::GpuVideoDecoder::Factories::ReadPixels, | |
384 factories_, | |
385 pb.texture_id(), | |
386 decoder_texture_target_, | |
387 natural_size), | |
388 media::BindToCurrentLoop(base::Bind(&RTCVideoDecoder::ReusePictureBuffer, | |
389 weak_this_, | |
390 picture.picture_buffer_id()))); | |
391 } | |
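CreateVideoFrame() converts the 90 kHz RTP timestamp by multiplying by 1000/90, which yields microseconds (90000 ticks * 1000 / 90 = 1,000,000, i.e. one second). A worked example of that arithmetic (TicksToMicroseconds is an illustrative helper, not part of the CL):

#include <cassert>
#include <cstdint>

// 90 kHz ticks -> microseconds, mirroring "timestamp * 1000 / 90" above.
int64_t TicksToMicroseconds(uint32_t ticks_90khz) {
  return static_cast<int64_t>(ticks_90khz) * 1000 / 90;
}

int main() {
  assert(TicksToMicroseconds(90000) == 1000000);  // 1 second
  assert(TicksToMicroseconds(3000) == 33333);     // roughly one frame at 30 fps
  return 0;
}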
392 | |
393 void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) { | |
394 DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id; | |
395 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
396 | |
397 std::map<int32, SHMBuffer*>::iterator it = | |
398 bitstream_buffers_in_decoder_.find(id); | |
399 if (it == bitstream_buffers_in_decoder_.end()) { | |
400 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
401 NOTREACHED() << "Missing bitstream buffer: " << id; | |
402 return; | |
403 } | |
404 | |
405 PutSHM(it->second); | |
406 bitstream_buffers_in_decoder_.erase(it); | |
407 | |
408 RequestBufferDecode(); | |
409 } | |
410 | |
411 void RTCVideoDecoder::NotifyFlushDone() { | |
412 DVLOG(3) << "NotifyFlushDone"; | |
413 NOTREACHED() << "Unexpected flush done notification."; | |
414 } | |
415 | |
416 void RTCVideoDecoder::NotifyResetDone() { | |
417 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
418 DVLOG(3) << "NotifyResetDone"; | |
419 | |
420 if (!vda_) | |
421 return; | |
422 | |
423 input_buffer_data_.clear(); | |
424 { | |
425 base::AutoLock auto_lock(lock_); | |
426 state_ = INITIALIZED; | |
427 } | |
428 // Send the pending buffers for decoding. | |
429 RequestBufferDecode(); | |
430 } | |
431 | |
432 void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) { | |
433 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
434 if (!vda_) | |
435 return; | |
436 | |
437 DLOG(ERROR) << "VDA Error:" << error; | |
438 DestroyVDA(); | |
439 | |
440 base::AutoLock auto_lock(lock_); | |
441 state_ = DECODE_ERROR; | |
442 } | |
443 | |
444 void RTCVideoDecoder::WillDestroyCurrentMessageLoop() { | |
445 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
446 factories_->Abort(); | |
447 weak_factory_.InvalidateWeakPtrs(); | |
448 DestroyVDA(); | |
449 } | |
450 | |
451 void RTCVideoDecoder::Initialize(base::WaitableEvent* waiter) { | |
452 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
453 base::MessageLoop::current()->AddDestructionObserver(this); | |
454 weak_this_ = weak_factory_.GetWeakPtr(); | |
455 waiter->Signal(); | |
456 } | |
457 | |
458 void RTCVideoDecoder::RequestBufferDecode() { | |
459 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
460 if (!vda_) | |
461 return; | |
462 | |
463 while (CanMoreDecodeWorkBeDone()) { | |
464 // Get a buffer and data from the queue. | |
465 std::pair<SHMBuffer*, BufferData>* buffer_pair; | |
466 SHMBuffer* shm_buffer = NULL; | |
467 BufferData* buffer_data = NULL; | |
468 { | |
469 base::AutoLock auto_lock(lock_); | |
470 // Do not request decode if VDA is resetting. | |
471 if (buffers_to_be_decoded_.size() == 0 || state_ == RESETTING) | |
472 return; | |
473 buffer_pair = &buffers_to_be_decoded_.front(); | |
474 buffers_to_be_decoded_.pop_front(); | |
475 shm_buffer = buffer_pair->first; | |
476 buffer_data = &buffer_pair->second; | |
477 // Drop the buffers before Reset. | |
478 if (buffer_data->bitstream_buffer_id < reset_bitstream_buffer_id_) { | |
Ami GONE FROM CHROMIUM (2013/06/26 00:11:58): This doesn't handle the wraparound you have at 30b…
wuchengli (2013/06/28 15:08:44): Done. Good catch...
479 available_shm_segments_.push_back(shm_buffer); | |
480 continue; | |
481 } | |
482 } | |
483 | |
484 // Create a BitstreamBuffer and send to VDA to decode. | |
485 media::BitstreamBuffer bitstream_buffer(buffer_data->bitstream_buffer_id, | |
486 shm_buffer->shm->handle(), | |
487 buffer_data->size); | |
488 bool inserted = bitstream_buffers_in_decoder_ | |
489 .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second; | |
490 DCHECK(inserted); | |
491 RecordBufferData(*buffer_data); | |
492 vda_->Decode(bitstream_buffer); | |
493 } | |
494 } | |
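The review comment above points out that comparing bitstream buffer ids with a plain "<" breaks once the 30-bit counter from Decode() wraps around. One hypothetical wraparound-tolerant comparison (not necessarily the CL's eventual fix) treats an id as "at or after" the reset point when it is within half the id space ahead of it, modulo 2^30:

#include <cassert>
#include <cstdint>

const uint32_t kIdSpace = 1u << 30;  // ids are masked to 30 bits in Decode()

bool IsAtOrAfter(int32_t id, int32_t reset_id) {
  uint32_t diff = static_cast<uint32_t>(id - reset_id) & (kIdSpace - 1);
  return diff < kIdSpace / 2;
}

int main() {
  assert(IsAtOrAfter(5, 3));                     // normal case
  assert(!IsAtOrAfter(3, 5));
  assert(IsAtOrAfter(1, (1 << 30) - 2));         // id wrapped past the reset point
  assert(!IsAtOrAfter((1 << 30) - 2, 1));
  return 0;
}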
495 | |
496 // Maximum number of concurrent VDA::Decode() operations RVD will maintain. | |
497 // Higher values allow better pipelining in the GPU, but also require more | |
498 // resources. | |
499 enum { | |
500 kMaxInFlightDecodes = 8 | |
501 }; | |
502 | |
503 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() { | |
504 return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes; | |
505 } | |
506 | |
507 void RTCVideoDecoder::SendBufferForDecode( | |
508 const webrtc::EncodedImage& inputImage, | |
509 SHMBuffer* shm_buffer, | |
510 const BufferData& buffer_data) { | |
511 memcpy(shm_buffer->shm->memory(), inputImage._buffer, inputImage._length); | |
512 std::pair<SHMBuffer*, BufferData> buffer_pair = | |
513 std::make_pair(shm_buffer, buffer_data); | |
514 | |
515 // Store the buffer and the metadata to the queue. | |
516 { | |
517 base::AutoLock auto_lock(lock_); | |
518 buffers_to_be_decoded_.push_back(buffer_pair); | |
519 } | |
520 vda_loop_proxy_->PostTask( | |
521 FROM_HERE, base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_)); | |
522 } | |
523 | |
524 void RTCVideoDecoder::SendPendingBuffersForDecode() { | |
525 while (webrtc_buffers_.size() > 0) { | |
526 // Get a WebRTC buffer from the queue and a shared memory. | |
527 std::pair<webrtc::EncodedImage, BufferData>* buffer_pair = | |
528 &webrtc_buffers_.front(); | |
Ami GONE FROM CHROMIUM (2013/06/26 00:11:58): why * instead of const&?
wuchengli (2013/06/28 15:08:44): No special reason. Is there a preference to use co…
Ami GONE FROM CHROMIUM (2013/06/28 17:04:00): Mainly I think what I look for is less ambiguity a…
529 webrtc::EncodedImage* input_image = &buffer_pair->first; | |
530 BufferData* buffer_data = &buffer_pair->second; | |
531 SHMBuffer* shm_buffer = GetSHM(input_image->_length); | |
532 if (!shm_buffer) | |
533 return; | |
534 webrtc_buffers_.pop_front(); | |
535 | |
536 SendBufferForDecode(*input_image, shm_buffer, *buffer_data); | |
537 delete[] input_image->_buffer; | |
538 } | |
539 } | |
540 | |
541 void RTCVideoDecoder::SaveToPendingBuffers( | |
542 const webrtc::EncodedImage& inputImage, | |
543 const BufferData& buffer_data) { | |
544 // Post to the child thread to create shared memory. | |
545 content::ChildThread::current()->message_loop()->PostTask( | |
546 FROM_HERE, | |
547 base::Bind(&RTCVideoDecoder::CreateSHM, weak_this_, inputImage._length)); | |
548 | |
549 // Clone the input image and save it to the queue. | |
Ami GONE FROM CHROMIUM (2013/06/26 00:11:58): Is it unsafe to simply swap _buffer pointers?
wuchengli (2013/06/28 15:08:44): This should be OK because WebRTC will allocate a n…
550 uint8_t* buffer = new uint8_t[inputImage._length]; | |
551 memcpy(buffer, inputImage._buffer, inputImage._length); | |
552 webrtc::EncodedImage encoded_image( | |
553 buffer, inputImage._length, inputImage._length); | |
554 std::pair<webrtc::EncodedImage, BufferData> buffer_pair = | |
555 std::make_pair(encoded_image, buffer_data); | |
556 webrtc_buffers_.push_back(buffer_pair); | |
557 } | |
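SaveToPendingBuffers() deep-copies the encoded bytes so the queued frame outlives WebRTC's own buffer (the thread above asks whether swapping _buffer pointers would do instead). A standalone sketch of that cloning step with the copy owned by a std::vector (PendingFrame and ClonePending are illustrative, not the CL's webrtc::EncodedImage handling):

#include <cstddef>
#include <cstdint>
#include <vector>

struct PendingFrame {
  std::vector<uint8_t> data;   // owned copy of the encoded bytes
  uint32_t timestamp;
};

// Copies |length| bytes out of the caller's buffer; the caller may free or
// reuse its own buffer immediately afterwards.
PendingFrame ClonePending(const uint8_t* buffer, size_t length, uint32_t timestamp) {
  PendingFrame frame;
  frame.data.assign(buffer, buffer + length);
  frame.timestamp = timestamp;
  return frame;
}

int main() {
  const uint8_t bytes[] = {0x10, 0x02, 0x00};
  PendingFrame frame = ClonePending(bytes, sizeof(bytes), 90000);
  return frame.data.size() == 3 ? 0 : 1;
}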
558 | |
559 void RTCVideoDecoder::ResetInternal() { | |
560 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
561 DVLOG(2) << "ResetInternal"; | |
562 if (vda_) | |
563 vda_->Reset(); | |
564 } | |
565 | |
566 void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id) { | |
567 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
568 DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id; | |
569 | |
570 if (!vda_) | |
571 return; | |
572 | |
573 CHECK(!picture_buffers_at_display_.empty()); | |
574 | |
575 size_t num_erased = picture_buffers_at_display_.erase(picture_buffer_id); | |
576 DCHECK(num_erased); | |
577 | |
578 std::map<int32, media::PictureBuffer>::iterator it = | |
579 assigned_picture_buffers_.find(picture_buffer_id); | |
580 | |
581 if (it == assigned_picture_buffers_.end()) { | |
582 // This picture was dismissed while in display, so we postponed deletion. | |
583 it = dismissed_picture_buffers_.find(picture_buffer_id); | |
584 DCHECK(it != dismissed_picture_buffers_.end()); | |
585 factories_->DeleteTexture(it->second.texture_id()); | |
586 dismissed_picture_buffers_.erase(it); | |
587 return; | |
588 } | |
589 | |
590 vda_->ReusePictureBuffer(picture_buffer_id); | |
591 } | |
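DismissPictureBuffer() and ReusePictureBuffer() together implement deferred texture deletion: a buffer dismissed while still at the display is parked in |dismissed_picture_buffers_| and its texture is deleted only once the display hands the buffer back. A compact sketch of that bookkeeping with plain containers (Tracker, Dismiss and Returned are illustrative names):

#include <cassert>
#include <map>
#include <set>

struct Tracker {
  std::map<int, int> assigned;       // buffer id -> texture id
  std::set<int> at_display;          // buffers currently handed to the display
  std::map<int, int> dismissed;      // dismissed while still at the display
  std::set<int> deleted_textures;    // stands in for DeleteTexture()

  void Dismiss(int id) {
    int texture = assigned.at(id);
    assigned.erase(id);
    if (at_display.count(id))
      dismissed[id] = texture;       // postpone deletion until it comes back
    else
      deleted_textures.insert(texture);
  }

  void Returned(int id) {            // the display gave the buffer back
    at_display.erase(id);
    std::map<int, int>::iterator it = dismissed.find(id);
    if (it != dismissed.end()) {
      deleted_textures.insert(it->second);
      dismissed.erase(it);
    }
    // Otherwise the buffer is still assigned and can be reused for decoding.
  }
};

int main() {
  Tracker tracker;
  tracker.assigned[1] = 101;
  tracker.at_display.insert(1);
  tracker.Dismiss(1);                          // still displayed: deletion postponed
  assert(tracker.deleted_textures.empty());
  tracker.Returned(1);                         // now the texture can be deleted
  assert(tracker.deleted_textures.count(101) == 1);
  return 0;
}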
592 | |
593 void RTCVideoDecoder::DestroyTextures() { | |
594 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
595 std::map<int32, media::PictureBuffer>::iterator it; | |
596 | |
597 for (it = assigned_picture_buffers_.begin(); | |
598 it != assigned_picture_buffers_.end(); | |
599 ++it) { | |
600 factories_->DeleteTexture(it->second.texture_id()); | |
601 } | |
602 assigned_picture_buffers_.clear(); | |
603 | |
604 for (it = dismissed_picture_buffers_.begin(); | |
605 it != dismissed_picture_buffers_.end(); | |
606 ++it) { | |
607 factories_->DeleteTexture(it->second.texture_id()); | |
608 } | |
609 dismissed_picture_buffers_.clear(); | |
610 } | |
611 | |
612 void RTCVideoDecoder::DestroyVDA() { | |
613 DVLOG(2) << "DestroyVDA"; | |
614 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
615 if (vda_) | |
616 vda_.release()->Destroy(); | |
617 DestroyTextures(); | |
618 } | |
619 | |
620 // Size of shared-memory segments we allocate. Since we reuse them we let them | |
621 // be on the beefy side. | |
622 static const size_t kSharedMemorySegmentBytes = 100 << 10; | |
623 | |
624 RTCVideoDecoder::SHMBuffer* RTCVideoDecoder::GetSHM(size_t min_size) { | |
625 // Reuse a SHM if possible. | |
626 base::AutoLock auto_lock(lock_); | |
627 if (!available_shm_segments_.empty() && | |
628 available_shm_segments_.back()->size >= min_size) { | |
629 SHMBuffer* ret = available_shm_segments_.back(); | |
630 available_shm_segments_.pop_back(); | |
631 return ret; | |
632 } | |
633 return NULL; | |
634 } | |
635 | |
636 void RTCVideoDecoder::CreateSHM(size_t min_size) { | |
637 DCHECK(base::MessageLoop::current() == | |
638 content::ChildThread::current()->message_loop()); | |
639 DVLOG(2) << "CreateSharedMemory. size=" << min_size; | |
640 size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes); | |
641 // Create three shared memory segments at once so we don't need to trampoline | |
Ami GONE FROM CHROMIUM (2013/06/26 00:11:58): Why 3 and not kMaxInFlightDecodes? (and method nam…
wuchengli (2013/06/28 15:08:44): Because the message can be posted twice or more be…
642 // to the child thread frequently. | |
643 for (int i = 0; i < 3; i++) { | |
644 base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate); | |
645 if (shm != NULL) | |
646 PutSHM(new SHMBuffer(shm, size_to_allocate)); | |
647 } | |
648 } | |
649 | |
650 void RTCVideoDecoder::PutSHM(SHMBuffer* shm_buffer) { | |
651 base::AutoLock auto_lock(lock_); | |
652 available_shm_segments_.push_back(shm_buffer); | |
653 } | |
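GetSHM() and PutSHM() above form a simple reuse pool: Get never allocates (allocation happens separately in CreateSHM() on the child thread); it only hands back a previously allocated segment whose size is large enough. A minimal sketch of the same Get/Put pool with plain heap buffers instead of base::SharedMemory (BufferPool and PooledBuffer are illustrative types):

#include <cstddef>
#include <cstdint>
#include <memory>
#include <vector>

struct PooledBuffer {
  std::unique_ptr<uint8_t[]> data;
  size_t size;
};

class BufferPool {
 public:
  // Returns a free buffer of at least |min_size| bytes, or null if none is
  // available; like GetSHM(), it never allocates a new buffer itself.
  std::unique_ptr<PooledBuffer> Get(size_t min_size) {
    if (!free_.empty() && free_.back()->size >= min_size) {
      std::unique_ptr<PooledBuffer> buffer = std::move(free_.back());
      free_.pop_back();
      return buffer;
    }
    return std::unique_ptr<PooledBuffer>();
  }
  // Returns a buffer to the pool for later reuse, like PutSHM().
  void Put(std::unique_ptr<PooledBuffer> buffer) {
    free_.push_back(std::move(buffer));
  }

 private:
  std::vector<std::unique_ptr<PooledBuffer> > free_;
};

int main() {
  BufferPool pool;
  std::unique_ptr<PooledBuffer> buffer(new PooledBuffer);
  buffer->size = 100 << 10;                      // ~100 KB, like the segments above
  buffer->data.reset(new uint8_t[buffer->size]);
  pool.Put(std::move(buffer));
  std::unique_ptr<PooledBuffer> reused = pool.Get(4096);   // reused, no allocation
  return reused ? 0 : 1;
}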
654 | |
655 void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) { | |
656 input_buffer_data_.push_front(buffer_data); | |
657 // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but | |
658 // that's too small for some pathological B-frame test videos. The cost of | |
659 // using too-high a value is low (192 bits per extra slot). | |
660 static const size_t kMaxInputBufferDataSize = 128; | |
661 // Pop from the back of the list, because that's the oldest and least likely | |
662 // to be useful in the future. | |
663 if (input_buffer_data_.size() > kMaxInputBufferDataSize) | |
664 input_buffer_data_.pop_back(); | |
665 } | |
666 | |
667 void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id, | |
668 uint32_t* timestamp, | |
669 uint32_t* width, | |
670 uint32_t* height, | |
671 size_t* size) { | |
672 for (std::list<BufferData>::iterator it = input_buffer_data_.begin(); | |
673 it != input_buffer_data_.end(); | |
674 ++it) { | |
675 if (it->bitstream_buffer_id != bitstream_buffer_id) | |
676 continue; | |
677 *timestamp = it->timestamp; | |
678 *width = it->width; | |
679 *height = it->height; | |
680 return; | |
681 } | |
682 NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id; | |
683 } | |
684 | |
685 } // namespace content | |